コード例 #1
0
ファイル: ping.py プロジェクト: Saectar/bitnodes
def init_settings(argv):
    """
    Populate the module-level SETTINGS dict from the configuration file.

    argv[1] names the INI file to read; argv[2] == "master" marks this
    process as the master.
    """
    parser = ConfigParser()
    parser.read(argv[1])

    # (option, typed getter) pairs for the straightforward settings.
    readers = (
        ('logfile', parser.get),
        ('workers', parser.getint),
        ('debug', parser.getboolean),
        ('source_address', parser.get),
        ('protocol_version', parser.getint),
        ('user_agent', parser.get),
        ('services', parser.getint),
        ('relay', parser.getint),
        ('socket_timeout', parser.getint),
        ('cron_delay', parser.getint),
        ('ttl', parser.getint),
        ('ipv6_prefix', parser.getint),
        ('nodes_per_ipv6_prefix', parser.getint),
        ('onion', parser.getboolean),
    )
    for key, reader in readers:
        SETTINGS[key] = reader('ping', key)

    # The Tor proxy is only configured when onion support is enabled.
    SETTINGS['tor_proxy'] = None
    if SETTINGS['onion']:
        parts = parser.get('ping', 'tor_proxy').split(":")
        SETTINGS['tor_proxy'] = (parts[0], int(parts[1]))

    # Make sure the crawl output directory exists.
    SETTINGS['crawl_dir'] = parser.get('ping', 'crawl_dir')
    if not os.path.exists(SETTINGS['crawl_dir']):
        os.makedirs(SETTINGS['crawl_dir'])

    # Set to True for master process
    SETTINGS['master'] = argv[2] == "master"
コード例 #2
0
ファイル: config_reader.py プロジェクト: AmiLiY/astoptool
class ConfigReader(object):
    """
    Configuration reader tailored to the game-project config layout.

    The original ``arg.gameOption`` helper was too tightly coupled to
    be used outside ``bible``; the file structure it read, however, was
    well designed.  This class keeps that structure but is decoupled
    and more widely applicable.

    Options are looked up first in the game/region section and then
    fall back to the shared ``common`` section.

    Example::

        conf = ConfigReader(game, region)
        ip = conf.get("mobile_www_ip")
        if conf.has_option("mobile_www_port"):
            port = conf.getint("mobile_www_port")
    """
    def __init__(self, game, section, conf_dir='/app/opbin/work/bible/conf'):
        """
        @param game: game name; selects the <conf_dir>/<game>.conf file.
        @param section: the game/region section to read from.
        @param conf_dir: directory holding the per-game .conf files.
        """
        self.game = game
        self.section = section
        self.conf_file = '{}/{}.conf'.format(conf_dir.rstrip('/'), self.game)
        self.config = ConfigParser()
        self.config.read(self.conf_file)
        # Whether the requested section actually exists in the file.
        self.has_section = self.config.has_section(self.section)

    def has_option(self, option):
        """Return True if *option* exists in the section or in 'common'."""
        return self._has_option(self.section, option) or self._has_option('common', option)

    def _has_option(self, section, option):
        return self.config.has_option(section, option)

    def _find_section(self, option):
        """Return the section that holds *option*.

        The instance's own section wins over 'common'; raises Exception
        when the option is found in neither (same contract as before).
        """
        for section in (self.section, 'common'):
            if self._has_option(section, option):
                return section
        raise Exception("Can't find option: {} in {}".format(option, self.conf_file))

    def get(self, option, raw=0, var=None):
        """Return *option* as a string (section first, then 'common')."""
        # Keyword arguments: on Python 3, ConfigParser.get() takes
        # 'raw' and 'vars' as keyword-only parameters, so the previous
        # positional call raised TypeError there.
        return self.config.get(self._find_section(option), option,
                               raw=raw, vars=var)

    def getint(self, option):
        """Return *option* as an int (section first, then 'common')."""
        return self.config.getint(self._find_section(option), option)

    def getfloat(self, option):
        """Return *option* as a float (section first, then 'common')."""
        return self.config.getfloat(self._find_section(option), option)

    def getboolean(self, option):
        """Return *option* as a bool (section first, then 'common')."""
        return self.config.getboolean(self._find_section(option), option)
コード例 #3
0
ファイル: config.py プロジェクト: spack/nodetools
    def get_value(self, section, option):
        """
        Return the value of *option* from *section*.

        Sub-sections inherit the options of their parent section: when
        the option is missing from the sub-section itself, the parent
        section is consulted.  The value is converted according to the
        type declared in the section specification ('string', 'int' or
        'bool'); None is returned when no type is declared for the
        option.

        Raises NameError when the (parent) section is not part of the
        specification.
        """
        # Work on the parent section's name in case *section* is
        # actually a sub-section.
        parent_section = self.get_parent_section_name(section)

        # First make sure the section exists in the specification.
        # ('in' replaces dict.has_key(), which was removed in Python 3.)
        if parent_section in self.__spec_sections:
            # Fetch the specification of the section.
            section_spec = self.__spec_sections.get(parent_section)
            option_type  = None

            # Walk the options of the specification looking for the
            # declared type of the value we want to fetch.
            for option_spec in section_spec[2]:
                if option_spec[0] == option:
                    option_type = option_spec[1]

            # Not found among the section's options?  Look among those
            # of the sub-section, if one exists.
            if self.__spec_has_subsection(parent_section):
                for sub_option_spec in section_spec[3]:
                    if sub_option_spec[0] == option:
                        option_type = sub_option_spec[1]

            # Dispatch to the typed getter matching the declared type.
            #
            # Sub-sections inherit the options of their parent section.
            # If the option does not exist in the section, it is most
            # likely a sub-section, so the option is then looked up in
            # the parent section.
            if option_type == 'string':
                try:
                    return ConfigParser.get(self, section, option)
                except NoOptionError:
                    return ConfigParser.get(self, parent_section, option)

            if option_type == 'int':
                try:
                    return ConfigParser.getint(self, section, option)
                except NoOptionError:
                    return ConfigParser.getint(self, parent_section, option)

            if option_type == 'bool':
                try:
                    return ConfigParser.getboolean(self, section, option)
                except NoOptionError:
                    return ConfigParser.getboolean(self, parent_section, option)

            # No declared type matched: nothing to return.
            return None
        else:
            raise NameError("Invalid section name: '%(section)s'." % \
                            {'section': section})
コード例 #4
0
def config():
    """Read settings.ini and return the user's playback settings.

    Also publishes several of the values as module globals for the
    rest of the program.
    """
    global video_format
    global resolution
    global lang
    global lang2
    global forceusa
    global localizecookies

    settings = ConfigParser()
    settings.read('settings.ini')

    quality = settings.get('SETTINGS', 'video_quality')
    # Maps a quality name to its [format, resolution] codes.
    qualities = {'android': ['107', '71'], '360p': ['106', '60'], '480p': ['106', '61'],
                 '720p': ['106', '62'], '1080p': ['108', '80'], 'highest': ['0', '0']}
    video_format, resolution = qualities[quality]

    # Translate the ASCII language keys used in the INI file into the
    # display names the site expects.
    langd = {'Espanol_Espana': u'Español (Espana)', 'Francais': u'Français (France)', 'Portugues': u'Português (Brasil)',
            'English': u'English', 'Espanol': u'Español', 'Turkce': u'Türkçe', 'Italiano': u'Italiano',
            'Arabic': u'العربية', 'Deutsch': u'Deutsch'}
    lang = langd[settings.get('SETTINGS', 'language')]
    lang2 = langd[settings.get('SETTINGS', 'language2')]

    # Boolean toggles.
    forcesub = settings.getboolean('SETTINGS', 'forcesubtitle')
    forceusa = settings.getboolean('SETTINGS', 'forceusa')
    localizecookies = settings.getboolean('SETTINGS', 'localizecookies')
    onlymainsub = settings.getboolean('SETTINGS', 'onlymainsub')

    return [lang, lang2, forcesub, forceusa, localizecookies, quality, onlymainsub]
コード例 #5
0
ファイル: checkinDAO.py プロジェクト: seashby10/papa-tools
def canCheckin(filePath):
    """
    @returns: True if destination is not locked by another user
        AND this checkin will not overwrite a newer version
    """
    checkoutDir = os.path.join(getUserCheckoutDir(),
                               os.path.basename(os.path.dirname(filePath)))

    # Local checkout state.
    checkoutInfo = ConfigParser()
    checkoutInfo.read(os.path.join(checkoutDir, ".checkoutInfo"))
    destination = checkoutInfo.get("Checkout", "checkedoutfrom")
    myVersion = checkoutInfo.getint("Checkout", "version")
    # Historically misnamed: "lockedbyme" is true for everyone, no
    # matter what; kept for file-format compatibility.
    lockedByMe = checkoutInfo.getboolean("Checkout", "lockedbyme")

    # State of the destination node.
    destInfo = ConfigParser()
    destInfo.read(os.path.join(destination, ".nodeInfo"))
    destLocked = destInfo.getboolean("Versioning", "locked")  # the real lock flag
    newestVersion = destInfo.getint("Versioning", "latestversion")

    if lockedByMe:
        return True
    return not (destLocked or myVersion < newestVersion)
コード例 #6
0
ファイル: runner.py プロジェクト: Sigterm-no/mnemosyne
def parse_config(config_file):
    """Parse the configuration file into a config dict.

    Exits the process when *config_file* does not exist.  Also sets up
    logging via do_logging() as a side effect.
    """
    if not os.path.isfile(config_file):
        sys.exit("Could not find configuration file: {0}".format(config_file))

    parser = ConfigParser()
    parser.read(config_file)

    # Logging destinations; the loggly token stays None here.
    log_file = None
    loggly_token = None
    if parser.getboolean('file_log', 'enabled'):
        log_file = parser.get('file_log', 'file')
    do_logging(log_file, loggly_token)

    config = {}
    if parser.getboolean('loggly_log', 'enabled'):
        config['loggly_token'] = parser.get('loggly_log', 'token')

    config['mongo_db'] = parser.get('mongodb', 'database')

    # hpfriends feed subscription settings.
    hp = 'hpfriends'
    config['hpf_feeds'] = parser.get(hp, 'channels').split(',')
    config['hpf_ident'] = parser.get(hp, 'ident')
    config['hpf_secret'] = parser.get(hp, 'secret')
    config['hpf_port'] = parser.getint(hp, 'port')
    config['hpf_host'] = parser.get(hp, 'host')

    # REST endpoint.
    config['webapi_port'] = parser.getint('webapi', 'port')
    config['webapi_host'] = parser.get('webapi', 'host')

    return config
コード例 #7
0
ファイル: main.py プロジェクト: fieryzig/sumo
def readIni(nb):
    """Load scenario number *nb* from its INI file into module globals.

    Returns the name of the file that was read.
    """
    global K, N, cut, gui, distrWE, distrNS, vehphWEA, vehphNSA, maxSumFlow, tlType, intergreenLength, GSum
    global phaseMinWE, phaseMaxWE, phaseMinNS, phaseMaxNS, maxGap, detPos

    filename = 'input%02d.ini' % nb
    parser = ConfigParser()
    parser.read(filename)

    # [general]
    K = parser.getint("general", "K")
    N = parser.getint("general", "N")
    cut = parser.getboolean("general", "cut")
    gui = parser.getboolean("general", "gui")

    # [demand]
    distrWE = parser.get("demand", "distrWE")
    distrNS = parser.get("demand", "distrNS")
    # NOTE: eval() executes arbitrary code from the INI file -- only
    # ever feed this function trusted input files.
    vehphWEA = eval(parser.get("demand", "vehphWEA"))
    vehphNSA = eval(parser.get("demand", "vehphNSA"))
    maxSumFlow = parser.getint("demand", "maxSumFlow")

    # [TL]
    tlType = parser.get("TL", "tlType")
    intergreenLength = parser.getint("TL", "intergreenLength")
    GSum = parser.getfloat("TL", "GSum")
    phaseMinWE, phaseMaxWE = eval(parser.get("TL", "phaseMinMaxWE"))
    phaseMinNS, phaseMaxNS = eval(parser.get("TL", "phaseMinMaxNS"))
    maxGap = parser.getfloat("TL", "maxGap")
    detPos = parser.getfloat("TL", "detPos")

    return filename
コード例 #8
0
ファイル: crawl.py プロジェクト: neurocis/PiggyNodes
def init_settings(argv):
    """
    Populate the module-level SETTINGS dict from the configuration
    file named by argv[1]; argv[2] == "master" marks this process as
    the master.
    """
    parser = ConfigParser()
    parser.read(argv[1])

    section = 'crawl'
    SETTINGS['logfile'] = parser.get(section, 'logfile')
    SETTINGS['seeders'] = parser.get(section, 'seeders').strip().split("\n")
    SETTINGS['workers'] = parser.getint(section, 'workers')
    SETTINGS['debug'] = parser.getboolean(section, 'debug')
    SETTINGS['user_agent'] = parser.get(section, 'user_agent')
    SETTINGS['socket_timeout'] = parser.getint(section, 'socket_timeout')
    SETTINGS['cron_delay'] = parser.getint(section, 'cron_delay')
    SETTINGS['max_age'] = parser.getint(section, 'max_age')
    SETTINGS['ipv6'] = parser.getboolean(section, 'ipv6')

    # Expand every excluded network into its individual addresses and
    # merge them with the explicitly excluded nodes.
    excluded = parser.get(section, 'exclude_nodes').strip().split("\n")
    networks = parser.get(section, 'exclude_networks').strip().split("\n")
    for network in networks:
        excluded.extend(
            str(address) for address in list(ip_network(unicode(network))))
    SETTINGS['exclude_nodes'] = set(excluded)

    # Make sure the crawl output directory exists.
    SETTINGS['crawl_dir'] = parser.get(section, 'crawl_dir')
    if not os.path.exists(SETTINGS['crawl_dir']):
        os.makedirs(SETTINGS['crawl_dir'])

    SETTINGS['master'] = argv[2] == "master"
コード例 #9
0
    def run(self):
        """
        Read CONFIG_FILE and spawn one daemon push process per app
        section, then block until every process has exited.
        """
        try:
            cfg = ConfigParser()
            re = cfg.read(CONFIG_FILE)
            # ConfigParser.read() returns the list of files it managed
            # to parse; if ours is not in it, the read failed.
            if CONFIG_FILE not in re:
                self.error_parse_config()
        except Exception:
            self.error_parse_config()

        appProcess = list()
        # One push process per configured application section.
        for i in cfg.sections():
            print "Starting push process for App %s" % cfg.get(i, 'app_name')
            p = Process(target=runApp, args=(cfg.getboolean(i, 'app_sandbox'),
                                             cfg.get(i, 'app_cert'),
                                             cfg.get(i, 'app_key'),
                                             cfg.get(i,'driver'),
                                             cfg.get(i, 'queue_host'),
                                             cfg.getint(i,'queue_port'),
                                             cfg.get(i, 'queue_db_name'),
                                             cfg.get(i, 'queue_username'),
                                             cfg.get(i, 'queue_password'),
                                             cfg.get(i, 'app_queue_name'),
                                             cfg.get(i, 'app_name'),
                                             cfg.getboolean(i,'debug'),
                                             cfg.get(i,'feedback_callback'),))
            appProcess.append(p)
            p.name = cfg.get(i, 'app_name')
            # Daemon children are terminated automatically when this
            # parent process exits.
            p.daemon = True
            p.start()

        # Block until every push process has finished.
        for p in appProcess:
            p.join()
コード例 #10
0
def canCheckin(toCheckin):
    """
    @returns: True if destination is not locked by another user
        AND this checkin will not overwrite a newer version
    """
    # Local checkout state: where the asset came from, which version
    # was checked out, and whether we hold the lock.
    info = ConfigParser()
    info.read(os.path.join(toCheckin, ".checkoutInfo"))
    destination = info.get("Checkout", "checkedoutfrom")
    localVersion = info.getint("Checkout", "version")
    ownLock = info.getboolean("Checkout", "lockedbyme")

    # State of the destination node.
    node = ConfigParser()
    node.read(os.path.join(destination, ".nodeInfo"))
    destLocked = node.getboolean("Versioning", "locked")
    newestVersion = node.getint("Versioning", "latestversion")

    #TODO raise different exceptions to give override options to the user
    if ownLock:
        return True
    return not (destLocked or localVersion < newestVersion)
コード例 #11
0
ファイル: artlas.py プロジェクト: isca0/ARTLAS
    def get_conf(self, config_file):
        """Parse *config_file* and store every setting in self.conf.

        Returns the populated self.conf dictionary.
        """
        parser = ConfigParser()
        parser.read(config_file)

        # (conf key, section, option) triples read as plain strings.
        text_options = (
            ('api', 'Telegram', 'api'),
            ('server_name', 'Zabbix', 'server_name'),
            ('agentd_config', 'Zabbix', 'agentd_config'),
            ('apache_log', 'General', 'apache_log'),
            ('rules', 'General', 'rules'),
            ('apache_mask', 'General', 'apache_mask'),
            ('cef_syslog_server', 'CEF_Syslog', 'server_name'),
        )
        # One should use getboolean to fetch boolean values, otherwise
        # they will always be True unless empty.
        flag_options = (
            ('telegram_enable', 'Telegram', 'enable'),
            ('zabbix_advantage_keys', 'Zabbix', 'enable_advantage_keys'),
            ('zabbix_enable', 'Zabbix', 'enable'),
            ('vhost_enable', 'General', 'vhost_enable'),
            ('cef_syslog_enable', 'CEF_Syslog', 'enable'),
        )

        for key, section, option in text_options:
            self.conf[key] = parser.get(section, option)
        for key, section, option in flag_options:
            self.conf[key] = parser.getboolean(section, option)

        # The group id is stored as a string in the file; convert it.
        self.conf['group_id'] = int(parser.get('Telegram', 'group_id'))

        return self.conf
コード例 #12
0
ファイル: config.py プロジェクト: fredrander/podagg
def getConfig(configFile):
    """Read *configFile* and build a Config object from it.

    Every setting has a default, so a missing option (or file) still
    produces a usable configuration; '~' in path values is expanded to
    the user's home directory.
    """
    # Defaults, overridden below by whatever the file provides.
    podlist = "~/.podagg/podlist"
    history = "~/.podagg/history"
    downloadPath = "~/podcasts/"
    separateDirs = True
    updateId3 = True
    latestEpisodeDir = None

    parser = ConfigParser()
    parser.read(configFile)

    if parser.has_option("files", "podlist"):
        podlist = parser.get("files", "podlist")
    if parser.has_option("files", "history"):
        history = parser.get("files", "history")
    if parser.has_option("paths", "download_dir"):
        downloadPath = parser.get("paths", "download_dir")
    if parser.has_option("paths", "separate_dirs"):
        separateDirs = parser.getboolean("paths", "separate_dirs")
    if parser.has_option("misc", "update_id3"):
        updateId3 = parser.getboolean("misc", "update_id3")
    if parser.has_option("paths", "latest_episode_dir"):
        latestEpisodeDir = parser.get("paths", "latest_episode_dir")

    # Expand '~' so the rest of the program gets usable paths.
    podlist = os.path.expanduser(podlist)
    history = os.path.expanduser(history)
    downloadPath = os.path.expanduser(downloadPath)
    if latestEpisodeDir is not None:
        latestEpisodeDir = os.path.expanduser(latestEpisodeDir)

    return Config(podlist=podlist, history=history,
                  downloadPath=downloadPath, separateDirs=separateDirs,
                  updateId3=updateId3, latestEpisodeDir=latestEpisodeDir)
コード例 #13
0
ファイル: ini.py プロジェクト: deloittem/irma-common
    def __init__(self, cfg_file, template):
        """
        @param cfg_file: file path of the configuration file.
        @param template: list of tuples with {section:(key_name, key_type)}
        """
        config = ConfigParser()
        config.read(cfg_file)

        # load configuration file
        # Every option becomes an attribute self.<section>.<name>; the
        # value type is guessed by trial: boolean first, then int,
        # falling back to the raw string.
        for section in config.sections():
            setattr(self, section, ConfigurationSection())
            for name in config.options(section):
                try:
                    value = config.getboolean(section, name)
                except ValueError:
                    try:
                        value = config.getint(section, name)
                    except ValueError:
                        value = config.get(section, name)
                setattr(getattr(self, section), name, value)

        # override with default values from template
        # NOTE(review): this re-binds a *fresh* ConfigurationSection
        # for every templated section, discarding any values loaded
        # above that the template does not re-read below -- confirm
        # this is intended.
        for section in template.keys():
            # setattr even if section is not present in ini file
            # as it may have default value, check at value fetching
            setattr(self, section, ConfigurationSection())
            if type(template[section]) != list:
                reason = "Malformed Template section type should be list"
                raise IrmaConfigurationError(reason)
            for (key_name, key_type, key_def_value) in template[section]:
                if not config.has_option(section, key_name):
                    # If key not found but a default value exists, set it
                    if key_def_value is not None:
                        setattr(getattr(self, section),
                                key_name,
                                key_def_value)
                        continue
                    else:
                        reason = ("file {0} ".format(cfg_file) +
                                  "missing section {0} ".format(section) +
                                  "key {0}".format(key_name))
                        raise IrmaConfigurationError(reason)
                try:
                    # self.boolean / self.integer are type tags declared
                    # on the class (not visible in this chunk).
                    if key_type == self.boolean:
                        value = config.getboolean(section, key_name)
                    elif key_type == self.integer:
                        value = config.getint(section, key_name)
                    else:
                        value = config.get(section, key_name)
                    setattr(getattr(self, section), key_name, value)
                except ValueError:
                    reason = ("file {0} ".format(cfg_file) +
                              "missing section {0} ".format(section) +
                              "Wrong type for key {0}".format(key_name))
                    raise IrmaConfigurationError(reason)
コード例 #14
0
ファイル: checkinDAO.py プロジェクト: seashby10/papa-tools
def checkin(asset, comment):
    """
    Checks a folder back in as the newest version
    @param asset: path of the asset being checked in.
    @param comment: checkin comment recorded with the new version.
    @precondition: asset is a valid path
    @precondition: canCheckin() == True OR all conflicts have been resolved
    @returns: the destination directory the asset was checked in to.
    """
    print "Checking in asset ", asset

    # First, we'll have to set the comment in here.
    assetToCheckIn = os.path.join(getUserCheckoutDir(), os.path.basename(os.path.dirname(asset)))
    setComment(assetToCheckIn, comment)

    # Then we configure everything that is in here.

    # print toCheckin
    # Local checkout metadata: where the asset was checked out from.
    chkoutInfo = ConfigParser()
    chkoutInfo.read(os.path.join(assetToCheckIn, ".checkoutInfo"))
    chkInDest = chkoutInfo.get("Checkout", "checkedoutfrom")
    lockedbyme = chkoutInfo.getboolean("Checkout", "lockedbyme")
    
    # Versioning state of the destination node; the new version number
    # is one past the current latest.
    nodeInfo = ConfigParser()
    nodeInfo.read(os.path.join(chkInDest, ".nodeInfo"))
    locked = nodeInfo.getboolean("Versioning", "locked")
    toKeep = nodeInfo.getint("Versioning", "Versionstokeep")
    newVersion = nodeInfo.getint("Versioning", "latestversion") + 1
    newVersionPath = os.path.join(chkInDest, "src", "v"+("%03d" % newVersion))
    
    # Abort before touching anything if the destination is locked.
    if not canCheckin(asset):
        print "Can not overwrite locked folder."
        raise Exception("Can not overwrite locked folder.")
    
    # Checkin
    shutil.copytree(assetToCheckIn, newVersionPath)

    # And fix permissions for the new version asset so that everyone can access it.
    os.system('chmod 774 -R '+ newVersionPath)
    
    # Record who checked in and when, bump the latest version, and
    # release the destination lock.
    timestamp = time.strftime("%a, %d %b %Y %I:%M:%S %p", time.localtime())
    nodeInfo.set("Versioning", "lastcheckintime", timestamp)
    nodeInfo.set("Versioning", "lastcheckinuser", getUsername())
    nodeInfo.set("Versioning", "latestversion", str(newVersion))
    nodeInfo.set("Versioning", "locked", "False")
    amu._writeConfigFile(os.path.join(chkInDest, ".nodeInfo"), nodeInfo)
    
    #print glob.glob(os.path.join(chkInDest, "src", "*"))
    # Prune old versions when a retention count is configured.
    if toKeep > 0:
        amu.purge(os.path.join(chkInDest, "src"), nodeInfo, newVersion - toKeep)
        amu._writeConfigFile(os.path.join(chkInDest, ".nodeInfo"), nodeInfo)

    # Clean up
    # Remove the local working copy and the checkout marker that was
    # copied into the freshly created version.
    shutil.rmtree(assetToCheckIn)
    os.remove(os.path.join(newVersionPath, ".checkoutInfo"))

    return chkInDest
コード例 #15
0
 def __init__(self):
     """Load the application configuration from App.__config__."""
     config = ConfigParser()
     config.read(App.__config__)

     # Network endpoint the application listens on.
     self.host = config.get('Host','host')
     self.port = config.getint('Host','port')
     # Logging destination and verbosity flags.
     self.logger = config.get('Log','logger')
     self.log_debug = config.getboolean('Log','debug')
     self.log_screen = config.getboolean('Log','screen')

     # Storage root shared with the Storage class; the S3 config file
     # path is resolved relative to the application directory.
     Storage.root = self.root = config.get('Path','root')
     Storage.loadConfig(App.dirname + config.get('Path', 's3_ini'))
コード例 #16
0
ファイル: main.py プロジェクト: diegc/plasma-flux
	def init(self):
		"""Initialise the plasmoid: build the UI, start the refresh
		timer and load settings from ~/.plasma-flux.cfg, falling back
		to defaults when the file is missing or unreadable.
		"""
		self.button = Plasma.IconWidget(self.parent)
		self.iconStopped = KIcon(ICON_STOPPED)
		self.iconRunning = KIcon(ICON_RUNNING)
		self.iconUnknown = KIcon(ICON_UNKNOWN)
		self.pid = None
		self.subp = None
		self.waiting = False

		self.setHasConfigurationInterface(True)
		#set size of Plasmoid
		self.resize(50, 50)
		self.setAspectRatioMode(Plasma.KeepAspectRatio)
		self.setBackgroundHints(Plasma.Applet.DefaultBackground)

		self.theme = Plasma.Svg(self)
		self.theme.setImagePath(THEME)
		self.layout = QGraphicsGridLayout(self.applet)
		self.layout.setContentsMargins(3,3,3,3)
		self.setMinimumSize(10,10)

		#set timer interval
		self.timer = self.startTimer(REFRESH)
		self.button.setIcon(self.iconUnknown)
		QObject.connect(self.button, SIGNAL('clicked()'), self.toggle)
		self.appletDestroyed.connect(self.destroy)

		self.cfgfile = '.plasma-flux.cfg'
		strFile = os.path.join(os.path.expanduser('~'), self.cfgfile)
		if os.path.exists(strFile):
			cfgParser = ConfigParser()
			try:
				# 'with' guarantees the handle is closed even when an
				# option read below raises (the old code only closed
				# the file on the success path, leaking it otherwise).
				with open(strFile) as cfgFile:
					cfgParser.readfp(cfgFile)
				self.lon = cfgParser.getfloat('settings', 'lon')
				self.lat = cfgParser.getfloat('settings', 'lat')
				self.nighttmp = cfgParser.getint('settings', 'nighttmp')
				self.daytmp = cfgParser.getint('settings', 'daytmp')
				self.smooth = cfgParser.getboolean('settings', 'smooth')
				self.program = cfgParser.get('settings', 'program')
				self.mode = cfgParser.get('settings', 'mode')
				self.gamma = cfgParser.get('settings', 'gamma')
				self.auto = cfgParser.getboolean('settings', 'auto')
			except Exception:
				# Any unreadable/malformed config falls back to the
				# defaults.  (Narrowed from a bare 'except:' so that
				# KeyboardInterrupt/SystemExit propagate.)
				self.defaultOptions()
		else:
			self.defaultOptions()
		self.updateStatus()
		# Optionally auto-start the configured program.
		if self.auto and self.checkStatus() == 'Stopped':
			print('Auto-starting %s' % self.program)
			self.toggle()
コード例 #17
0
ファイル: crawl.py プロジェクト: leanklass/bitnodes
def init_settings(argv):
    """
    Populate the module-level SETTINGS dict from the configuration
    file named by argv[1]; argv[2] == "master" marks this process as
    the master.
    """
    parser = ConfigParser()
    parser.read(argv[1])
    section = 'crawl'

    SETTINGS['logfile'] = parser.get(section, 'logfile')
    SETTINGS['network'] = parser.get(section, 'network')
    SETTINGS['seeders'] = parser.get(section, 'seeders').strip().split("\n")
    SETTINGS['workers'] = parser.getint(section, 'workers')
    SETTINGS['debug'] = parser.getboolean(section, 'debug')
    SETTINGS['source_address'] = parser.get(section, 'source_address')
    SETTINGS['protocol_version'] = parser.getint(section, 'protocol_version')
    SETTINGS['user_agent'] = parser.get(section, 'user_agent')
    SETTINGS['services'] = parser.getint(section, 'services')
    SETTINGS['relay'] = parser.getint(section, 'relay')
    SETTINGS['socket_timeout'] = parser.getint(section, 'socket_timeout')
    SETTINGS['cron_delay'] = parser.getint(section, 'cron_delay')
    SETTINGS['snapshot_delay'] = parser.getint(section, 'snapshot_delay')
    SETTINGS['max_age'] = parser.getint(section, 'max_age')
    SETTINGS['ipv6'] = parser.getboolean(section, 'ipv6')
    SETTINGS['ipv6_prefix'] = parser.getint(section, 'ipv6_prefix')
    SETTINGS['nodes_per_ipv6_prefix'] = parser.getint(
        section, 'nodes_per_ipv6_prefix')

    # Excluded address ranges, parsed into network lists.
    SETTINGS['exclude_ipv4_networks'] = list_excluded_networks(
        parser.get(section, 'exclude_ipv4_networks'))
    SETTINGS['exclude_ipv6_networks'] = list_excluded_networks(
        parser.get(section, 'exclude_ipv6_networks'))
    SETTINGS['exclude_ipv4_bogons'] = parser.getboolean(
        section, 'exclude_ipv4_bogons')

    # Keep a copy of the exclusion list as it was initially configured.
    SETTINGS['initial_exclude_ipv4_networks'] = \
        SETTINGS['exclude_ipv4_networks']

    # Tor support: the proxy is only configured when onion is enabled.
    SETTINGS['onion'] = parser.getboolean(section, 'onion')
    SETTINGS['tor_proxy'] = None
    if SETTINGS['onion']:
        proxy = parser.get(section, 'tor_proxy').split(":")
        SETTINGS['tor_proxy'] = (proxy[0], int(proxy[1]))
    SETTINGS['onion_nodes'] = parser.get(
        section, 'onion_nodes').strip().split("\n")

    SETTINGS['include_checked'] = parser.getboolean(section, 'include_checked')

    # Make sure the crawl output directory exists.
    SETTINGS['crawl_dir'] = parser.get(section, 'crawl_dir')
    if not os.path.exists(SETTINGS['crawl_dir']):
        os.makedirs(SETTINGS['crawl_dir'])

    # Set to True for master process
    SETTINGS['master'] = argv[2] == "master"
コード例 #18
0
ファイル: world.py プロジェクト: oneshoturdone/baskit
 def get_config(self):
     '''get_config
     Gets the stored configuration from the config file and applies
     those settings to the object.
     '''
     # Each world has its own section in the shared config file.
     section = 'World: %s' % self.name
     conf = ConfigParser()
     conf.read(self._config_file)
     # Write a default section when this world has never been saved.
     # NOTE(review): 'conf' was read *before* set_config() ran, so the
     # freshly written section is not visible to the reads below --
     # looks like a re-read is missing here; confirm.
     if not conf.has_section(section):
         self.set_config()
     # NOTE(review): 'environment' comes from the global 'Server'
     # section, not the per-world section -- presumably intentional,
     # but verify.
     self.env = conf.get('Server', 'environment')
     self.ramdisk = conf.getboolean(section, 'ramdisk')
     self.automount = conf.getboolean(section, 'automount')
コード例 #19
0
ファイル: csv2qif.py プロジェクト: igortg/convert2qif
 def ReadCfg(self, cfg_filename):
     """Load CSV-import settings from *cfg_filename* onto the object.

     Required options: delimiter, date format and column layout.
     Optional: encoding, locale decimal separator and amount negation
     (left untouched when absent from the file).
     """
     cp = ConfigParser()
     # open() replaces the Python-2-only file() builtin, and the
     # 'with' block closes the handle (the old code never closed it).
     with open(cfg_filename) as cfg_file:
         cp.readfp(cfg_file)
     self._delimiter = cp.get('CSV', 'delimiter')
     self._date_format = cp.get('CSV', 'date')
     self._csv_columns = cp.get('CSV', 'columns')
     if cp.has_option('CSV', 'encoding'):
         self._encoding = cp.get('CSV', 'encoding')
     if cp.has_option('CSV', 'locale-decimal-sep'):
         self._locale_decimal_sep = cp.getboolean('CSV', 'locale-decimal-sep')
     if cp.has_option('CSV', 'negate-amount'):
         self._negate_amount = cp.getboolean('CSV', 'negate-amount')
コード例 #20
0
def fn(options, args):
    """Mail each file named in *args* using the SMTP settings stored
    in LoginAccount.txt.

    options.addr overrides the configured recipient; options.atta
    switches between attachment and inline-HTML mode.
    """
    cfg = ConfigParser()
    cfg.read('LoginAccount.txt')

    flag = 'mailClient'
    keys = ('host', 'port', 'user', 'pass', 'fr', 'to', 'debuglevel', 'login', 'starttls')
    stor = {}
    # Pre-seed every key so later lookups never fail, even when the
    # config file is incomplete.
    for k in keys: stor.setdefault(k, '')

    try:
        stor['host'] = cfg.get(flag, 'host')
        stor['port'] = cfg.getint(flag, 'port')
        stor['user'] = cfg.get(flag, 'user')
        stor['pass'] = cfg.get(flag, 'pass')
        stor['fr'] = cfg.get(flag, 'fr')
        stor['to'] = cfg.get(flag, 'to')
        stor['debuglevel'] = cfg.getboolean(flag, 'debuglevel')
        stor['login'] = cfg.getboolean(flag, 'login')
        stor['starttls'] = cfg.getboolean(flag, 'starttls')
    except NoOptionError: pass  # best-effort: missing options keep ''

    # A recipient given on the command line wins over the config file.
    if options.addr:
        stor['to'] = options.addr
   
    s = get_smtp_client(stor)
    # One message per file; the file name doubles as the subject.
    for arg in args:
        sys.stdout.write('sending... ' + arg)
        msg = MIMEMultipart()
        msg['From'] = stor['fr']
        msg['Subject'] = arg
        msg['To'] = stor['to']
        msg.set_boundary('===== Baby, python is good =====')

        # NOTE(review): the condition looks inverted -- 'not
        # options.atta' takes the *attachment* branch; confirm the
        # intended meaning of options.atta.
        if not options.atta:
            data = MIMEBase('application', 'octet-stream')
            data.set_payload(open(arg, 'rb').read())
            Encoders.encode_base64(data)
            data.add_header('Content-Disposition', 'attachment', filename = arg)
            msg.attach(data)
        else:
            # Inline mode: wrap the file contents in an HTML <pre>.
            b = '''<html><head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
</head><body><pre>'''
            b += open(arg, 'rb').read()
            b += '''</pre></body></html>'''

            body = MIMEText(b, _subtype = 'html', _charset = 'utf-8')
            msg.attach(body)
        sendmail(s, msg)
        sys.stdout.write(' done.\n')
    s.close()
コード例 #21
0
ファイル: ocr.py プロジェクト: porcpine1967/ocr-proofreader
def clean(start_page, end_page):
    """ Batch cleans the pages in text/clean.

    Progress is tracked with flags in book.cnf: each pass flips its
    flag to 'false' so the next invocation moves on to the following
    stage (header removal -> line joining -> interactive line fixing).
    """

    config = ConfigParser()
    config.read('book.cnf')
    # Missing flags default to True so a fresh book starts at stage 1.
    try:
        clean_headers = config.getboolean('process', 'clean_headers')
    except NoOptionError:
        clean_headers = True
    try:
        join_lines = config.getboolean('process', 'join_lines')
    except NoOptionError:
        join_lines = True

    if clean_headers:
	print 'cleaning headers'
        remove_headers()
        if not config.has_section('process'):
            config.add_section('process')
        config.set('process', 'clean_headers', 'false')
        with open('book.cnf', 'wb') as f:
            config.write(f)
        lm =_loaded_aspell_line_manager(start_page, end_page)
        lm.quick_fix()
    elif join_lines:
	print 'joining lines'
        if not config.has_section('process'):
            config.add_section('process')
        config.set('process', 'join_lines', 'false')
        with open('book.cnf', 'wb') as f:
            config.write(f)
        lm =_loaded_file_line_manager(start_page, end_page)
        lm.join_lines()
    else:
        # if interrupted by keyboard, go ahead and write changes
        lang = get_lang()
#           spell_checker.FileConfiguredSpellChecker(lang, './dict.{}.pws'.format(lang)),
#           spell_checker.AspellSpellChecker(lang, './dict.{}.pws'.format(lang)),
        lm = line_manager.LineManager(
#           spell_checker.AspellSpellChecker(lang, './dict.{}.pws'.format(lang)),
            spell_checker.FileConfiguredSpellChecker(lang),
            start_page,
            end_page
            )
        lm.load('text/clean')
        try:
            lm.fix_lines()
        except KeyboardInterrupt:
            pass
    # Persist whatever stage ran above back to text/clean.
    lm.write_pages('text/clean', False)
コード例 #22
0
ファイル: runner.py プロジェクト: GovCERT-CZ/mnemosyne
def parse_config(config_file):
    """Load runner settings from *config_file* and return them as a dict.

    Exits the process when the file is missing. Mongo connection values
    come from REMOTE_MONGO/MONGO_* environment variables when set,
    otherwise a local unauthenticated instance is assumed.
    """
    if not os.path.isfile(config_file):
        sys.exit("Could not find configuration file: {0}".format(config_file))

    cfg = ConfigParser()
    cfg.read(config_file)

    logfile = None
    loggly_token = None

    if cfg.getboolean('file_log', 'enabled'):
        logfile = cfg.get('file_log', 'file')

    # NOTE(review): loggly_token is still None at this point; the token
    # read below only ends up in the returned dict.
    do_logging(logfile, loggly_token)

    settings = {}

    if cfg.getboolean('loggly_log', 'enabled'):
        settings['loggly_token'] = cfg.get('loggly_log', 'token')

    settings['mongo_db'] = cfg.get('mongodb', 'database')

    if os.getenv("REMOTE_MONGO") == "true":
        settings['mongo_host'] = os.getenv("MONGO_HOST")
        settings['mongo_port'] = int(os.getenv("MONGO_PORT"))
        settings['mongo_auth'] = os.getenv("MONGO_AUTH") == "true"
        if settings['mongo_auth']:
            settings['mongo_user'] = os.getenv("MONGO_USER")
            settings['mongo_password'] = os.getenv("MONGO_PASSWORD")
            settings['mongo_auth_mechanism'] = os.getenv("MONGO_AUTH_MECHANISM")
    else:
        settings['mongo_auth'] = False
        settings['mongo_host'] = "127.0.0.1"
        settings['mongo_port'] = 27017

    settings['hpf_feeds'] = cfg.get('hpfriends', 'channels').split(',')
    settings['hpf_ident'] = cfg.get('hpfriends', 'ident')
    settings['hpf_secret'] = cfg.get('hpfriends', 'secret')
    settings['hpf_port'] = cfg.getint('hpfriends', 'port')
    settings['hpf_host'] = cfg.get('hpfriends', 'host')

    settings['webapi_port'] = cfg.getint('webapi', 'port')
    settings['webapi_host'] = cfg.get('webapi', 'host')

    settings['normalizer_ignore_rfc1918'] = cfg.getboolean('normalizer', 'ignore_rfc1918')

    return settings
コード例 #23
0
ファイル: settings.py プロジェクト: Deltares/dcs
 def __init__(self):
     """Load AWS and scheduling settings from ./ilm.conf.

     Raises IOError when the config file is missing.
     """
     if not os.path.exists('ilm.conf'):
         logging.error('we need a valid config, none found!')
         # BUG FIX: the original used a bare `raise` with no active
         # exception, which itself crashes with a TypeError instead of
         # reporting the missing config. Raise a real exception.
         raise IOError('we need a valid config, none found!')
     parser = ConfigParser()
     parser.read('ilm.conf')
     self.aws_region = parser.get('aws', 'region')
     self.aws_secret = parser.get('aws', 'secret_key')
     self.aws_access = parser.get('aws', 'access_key')
     self.aws_seqgrp = parser.get('aws', 'security_group')
     # NOTE(review): kept as a string (.get, not .getint) as in the
     # original -- confirm callers expect that.
     self.aws_req_max_wait = parser.get('aws', 'request_max_wait_time')
     self.auto_remove_failed = parser.getboolean('parameters', 'auto_remove_failed')
     self.recycle_workers = parser.getboolean('parameters', 'recycle_workers')
     self.max_instances = parser.getint('parameters', 'max_instances')
     self.max_storage = parser.getint('parameters', 'max_storage')
コード例 #24
0
ファイル: config.py プロジェクト: adaptee/pychmviewer
 def __init__(self):
     """Load viewer preferences from the global config file, falling back
     to sensible per-option defaults for anything missing or malformed."""
     cfg = ConfigParser()
     try:
         cfg.read(globalcfgfile)
     except Exception:
         pass  # unreadable file: the per-option fallbacks below apply
     if not cfg.has_section("userconfig"):
         self.loadlasttime = True
         self.openremote = True
         self.fontfamily = None
         self.fontsize = None
         self.__lastdir = home
         self.sengine_own = True
     if not cfg.has_section("searchext"):
         # Default set of file extensions included in full-text search.
         self.searchext = {"html": True, "htm": True, "js": False, "txt": False, "css": False}
     else:
         self.searchext = {}
         for a, b in cfg.items("searchext"):
             if b.lower() == "false":
                 self.searchext[a] = False
             else:
                 self.searchext[a] = True
     try:
         self.loadlasttime = cfg.getboolean("userconfig", "loadlasttime")
     except Exception:
         # BUG FIX: this branch used to assign self.lastlasttime (typo),
         # so a bad or missing value never reset self.loadlasttime.
         self.loadlasttime = True
     try:
         self.sengine_own = cfg.getboolean("userconfig", "default_search_engine")
     except Exception:
         self.sengine_own = True
     try:
         self.openremote = cfg.getboolean("userconfig", "openremote")
     except Exception:
         self.openremote = True
     try:
         self.fontfamily = cfg.get("userconfig", "fontfamily").decode("utf-8")
         if self.fontfamily == "default":
             self.fontfamily = None
     except Exception:
         self.fontfamily = None
     try:
         self.fontsize = cfg.getint("userconfig", "fontsize")
     except Exception:
         self.fontsize = None
     try:
         self.__lastdir = cfg.get("userconfig", "lastdir")
     except Exception:
         self.__lastdir = home
コード例 #25
0
ファイル: Config.py プロジェクト: jackpf/ossim-arc
 def getboolean(self, section, option):
     """Boolean lookup that logs a warning and returns False when the
     stored value cannot be parsed as a boolean."""
     try:
         return ConfigParser.getboolean(self, section, option)
     except ValueError: # not a boolean
         logger.warning("Value %s->%s is not a boolean" % (section, option))
         return False
コード例 #26
0
def isCheckedOut(dirPath):
    """Return True when dirPath's .nodeInfo file marks it as locked.

    A directory with no .nodeInfo file is never considered checked out.
    """
    infoPath = os.path.join(dirPath, ".nodeInfo")
    if os.path.exists(infoPath):
        parser = ConfigParser()
        parser.read(infoPath)
        return parser.getboolean("Versioning", "locked")
    return False
コード例 #27
0
ファイル: config.py プロジェクト: syncloud/diaspora
class UserConfig:
    """Read/write access to the diaspora app's user configuration file.

    Creates the file with activation disabled on first use.
    """

    def __init__(self, filename=default_user_config_file):
        self.parser = ConfigParser()
        self.filename = filename
        if not isfile(self.filename):
            # First run: create the file with activation disabled.
            self.parser.add_section('diaspora')
            self.set_activated(False)
            self.__save()
        else:
            # FIX: the original also read the file unconditionally before
            # this existence check, parsing it twice when it existed.
            self.parser.read(self.filename)

        if not self.parser.has_section('diaspora'):
            self.parser.add_section('diaspora')

    def is_installed(self):
        """Return the stored 'activated' flag as a boolean."""
        return self.parser.getboolean('diaspora', 'activated')

    def set_activated(self, value):
        """Persist the 'activated' flag (stored as str(value))."""
        self.parser.set('diaspora', 'activated', str(value))
        self.__save()

    def __save(self):
        # 'wb' matches Python 2 ConfigParser.write, which emits str.
        with open(self.filename, 'wb') as f:
            self.parser.write(f)
コード例 #28
0
def cloneShot(src, src_name, dst, dst_name):
	src_cfg = ConfigParser()
	dst_cfg = ConfigParser()
	src_cfg.read(os.path.join(src, ".nodeInfo"))
	src_version = src_cfg.getint("Versioning", "latestversion")
	dst_cfg.read(os.path.join(dst, ".nodeInfo"))
	dst_version = dst_cfg.getint("Versioning", "latestversion")
	if dst_cfg.getboolean("Versioning", "locked"):
		return False
	src_path = os.path.join(src, "src", 'v'+"%03d" % src_version)
	src_filepath = os.path.join(src_path, src_name+'_animation.mb')
	print dst_version
	dst_path = os.path.join(dst, "src", 'v'+"%03d" % (dst_version+1))
	os.mkdir(dst_path)
	dst_filepath = os.path.join(dst_path, dst_name+'_animation.mb')
	print 'copying '+src_filepath+' to '+dst_filepath
	shutil.copyfile(src_filepath, dst_filepath)

	#write out new animation info
	timestamp = time.strftime("%a, %d %b %Y %I:%M:%S %p", time.localtime())
	user = getUsername()
	comment = 'copied from '+src_name
	dst_cfg.set("Versioning", "lastcheckintime", timestamp)
	dst_cfg.set("Versioning", "lastcheckinuser", user)
	dst_cfg.set("Versioning", "latestversion", str(dst_version+1))
	commentLine = user + ': ' + timestamp + ': ' + '"' + comment + '"' 
	dst_cfg.set("Comments", 'v' + "%03d" % (dst_version+1,), commentLine)	
	_writeConfigFile(os.path.join(dst, ".nodeInfo"), dst_cfg)
	return True
コード例 #29
0
ファイル: conf.py プロジェクト: leonworkshop/basin
  def readFrom(self, path, section):
    """Merge options from *section* of the ini file at *path* into self.

    Each value is coerced using the type of the matching entry in the
    module-level ``defaults`` dict: lists are comma-split, booleans use
    ConfigParser parsing, and everything else is tried as int, then
    float, then left as a string.

    Raises CarbonConfigException when the file cannot be read.
    """
    parser = ConfigParser()
    if not parser.read(path):
      raise CarbonConfigException("Failed to read config file %s" % path)

    if not parser.has_section(section):
      return

    for key, value in parser.items(section):
      key = key.upper()

      # Detect type from defaults dict
      if key in defaults:
        valueType = type(defaults[key])
      else:
        valueType = str

      if valueType is list:
        value = [v.strip() for v in value.split(',')]

      elif valueType is bool:
        value = parser.getboolean(section, key)

      else:
        # Attempt to figure out numeric types automatically, falling back
        # to the raw string. FIX: only ValueError is expected from int()/
        # float() on a string -- the original bare excepts also swallowed
        # KeyboardInterrupt and SystemExit.
        try:
          value = int(value)
        except ValueError:
          try:
            value = float(value)
          except ValueError:
            pass

      self[key] = value
コード例 #30
0
def setVersion(dirPath, version):	
    """
    Sets the 'latest version' as the specified version and deletes later versions
    @precondition: dirPath is a valid path
    @precondition: version is an existing version
    @precondition: the folder has been checked out by the user

    @postcondition: the folder will be checked in and unlocked
    """

    chkoutInfo = ConfigParser()
    chkoutInfo.read(os.path.join(dirPath, ".checkoutInfo"))
    chkInDest = chkoutInfo.get("Checkout", "checkedoutfrom")
    lockedbyme = chkoutInfo.getboolean("Checkout", "lockedbyme")
    
    nodeInfo = ConfigParser()
    nodeInfo.read(os.path.join(chkInDest, ".nodeInfo"))
    newVersionPath = os.path.join(chkInDest, "src", "v"+str(version))

    if lockedbyme == False:
        print "Cannot overwrite locked folder."
        raise Exception("Can not overwrite locked folder.")
        
    # Set version
    timestamp = time.strftime("%a, %d %b %Y %I:%M:%S %p", time.localtime())
    nodeInfo.set("Versioning", "lastcheckintime", timestamp)
    nodeInfo.set("Versioning", "lastcheckinuser", getUsername())
    nodeInfo.set("Versioning", "latestversion", str(version))
    nodeInfo.set("Versioning", "locked", "False")
    _writeConfigFile(os.path.join(chkInDest, ".nodeInfo"), nodeInfo)
    
    # Clean up
    purgeAfter(os.path.join(chkInDest, "src"), version)
    shutil.rmtree(dirPath)
コード例 #31
0
class Config(object):
    """ConfigParser wrapper that persists every change to an ini file.

    File access during construction and update() is serialized with
    FileLock. Mutating methods return True/False instead of raising.
    """

    def __init__(self, inifile=None, configs=None):
        """Load *inifile*, seeding missing sections/options from *configs*
        ({section: {option: value}}); the file is written back when
        anything was added. A None inifile leaves the instance unusable.
        """
        if inifile is None:
            return None
        self.inifile = inifile
        self.cfg = ConfigParser()

        with FileLock(self.inifile):
            if exists(self.inifile):
                self.cfg.read(self.inifile)

            # initialize configurations
            default_configs = {} if configs is None else configs
            needupdate = False
            for sec, secdata in default_configs.items():
                if not self.cfg.has_section(sec):
                    self.cfg.add_section(sec)
                    needupdate = True
                for opt, val in secdata.items():
                    if not self.cfg.has_option(sec, opt):
                        self.cfg.set(sec, opt, val)
                        needupdate = True

            # update ini file
            if needupdate:
                self.update(False)

    def update(self, lock=True):
        """Write the in-memory config to disk; True on success.

        Pass lock=False when the caller already holds the file lock.
        """
        if lock:
            flock = FileLock(self.inifile)
            flock.acquire()
        try:
            # FIX: `with` guarantees the handle closes even if write()
            # fails (the original leaked the handle on error).
            with open(self.inifile, 'w') as inifp:
                self.cfg.write(inifp)
            return True
        except Exception:
            # Narrowed from a bare except so Ctrl-C/SystemExit propagate.
            return False
        finally:
            if lock:
                flock.release()

    def has_option(self, section, option):
        return self.cfg.has_option(section, option)

    def remove_option(self, section, option):
        return self.cfg.remove_option(section, option)

    def get(self, section, option):
        """Return the option value, or None when it does not exist."""
        if self.cfg.has_option(section, option):
            return self.cfg.get(section, option)
        else:
            return None

    def getboolean(self, section, option):
        return self.cfg.getboolean(section, option)

    def getint(self, section, option):
        return self.cfg.getint(section, option)

    def has_section(self, section):
        return self.cfg.has_section(section)

    def add_section(self, section):
        return self.cfg.add_section(section)

    def remove_section(self, section=None):
        if section is None:
            return False
        else:
            return self.cfg.remove_section(section)

    def set(self, section, option, value):
        """Set one option and persist the file; False on failure."""
        try:
            self.cfg.set(section, option, value)
        except Exception:  # e.g. NoSectionError; narrowed from bare except
            return False
        return self.update()

    def get_section_list(self):
        '''Return a list of section names, excluding [DEFAULT]'''
        return self.cfg.sections()

    def get_option_list(self, section):
        '''Return a list of option names for the given section name.'''
        return self.cfg.options(section)

    def get_config_list(self):
        '''Return a list of all config for the given config file.'''
        config_list = []
        for section in self.cfg.sections():
            sec = {'section': section, 'option': {}}
            for key in self.cfg.options(section):
                sec['option'][key] = self.cfg.get(section, key)
            config_list.append(sec)
        return config_list

    def get_config(self):
        '''Return a dict of all config for the given config file.'''
        config = {}
        for section in self.cfg.sections():
            config[section] = {}
            for item in self.cfg.options(section):
                config[section][item] = self.cfg.get(section, item)
        return config

    def addsection(self, section, data):
        '''add one section'''
        try:
            if not self.cfg.has_section(section):
                self.cfg.add_section(section)
            for option, value in data.items():
                self.cfg.set(section, option, value)
            return self.update(False)
        except Exception:  # narrowed from bare except
            return False

    def addsections(self, section):
        '''add some sections'''
        try:
            for sec, data in section.items():
                if not self.cfg.has_section(sec):
                    self.cfg.add_section(sec)
                for option, value in data.items():
                    self.cfg.set(sec, option, value)
            return self.update(False)
        except Exception:  # narrowed from bare except
            return False
コード例 #32
0
ファイル: main.py プロジェクト: micmax93/AimApi
import time
from datetime import datetime

__author__ = 'micmax93'
from api import ApiConnection
from player import SinglePlayer
from loader import load_configs
from grid import PublishersList, ViewersGrid
from ConfigParser import ConfigParser
from logger import CsvLogger


# Module-level setup: runs at import time and reads config.ini from the
# current working directory.
_config = ConfigParser()
_config.read('config.ini')
_path = _config.get('global', 'path')
_mode = _config.getboolean('global', 'targeted')  # True => targeted mode (usage not visible in this chunk)


publishers, _grid = load_configs(_path)
pub_list = PublishersList(_grid)

player = SinglePlayer()

# NOTE(review): API host is hard-coded -- consider moving it into config.ini.
api = ApiConnection(host='25.152.172.38')
log = CsvLogger('log.csv', ['date', 'video', 'viewers', 'value'])


def get_viewers_grid(viewers):
    # Build a ViewersGrid from each viewer's age and gender.
    # NOTE(review): this definition looks truncated in this snippet --
    # `grid` is built but never returned; presumably `return grid` follows.
    grid = ViewersGrid()
    for viewer in viewers:
        grid.add(viewer.age, viewer.gender)
コード例 #33
0
def configure(ini):
    global runs, repeatevery, cutoff, delay, maxtime, logfilemax, outputmax, \
       make, makefile, affinitymask

    try:
        parser = ConfigParser()
        parser.read(ini)

        # override default directory locations
        for k, v in parser.items('dirs'):
            dirs[k] = normpath(expandvars(expanduser(v)))

        for k, v in parser.items('filters'):
            filters[k] = v.split()
        filters['onlydirs'] = frozenset(filters['onlydirs'])

        for k, v in parser.items('alias'):
            alias[k] = v.split()

        make = frozenset(parser.get('build', 'make').split())

        f = dirs['makefile'] if 'makefile' in dirs else defaultMakefile()
        makefile = normpath(expandvars(expanduser(f))) if f else None

        # compiler interpreter runtime location shell vars
        for k, v in parser.items('tools'):
            os.environ[k.upper()] = v

        commandlines.update(parser.items('commandlines'))

        for k, v in parser.items('testrange'):
            testrange[k] = v.split()

        for k, v in parser.items('testdata'):
            testdata[k] = v

        for k, v in parser.items('outputcheck'):
            outputcheck[k] = frozenset(v.split())

        for k, v in parser.items('ndiff_outputcheck'):
            ndiff_outputcheck[k] = v

        # test specific shell vars
        default = {}
        for each in filters['onlydirs']:
            if parser.has_section(each):
                d = {}
                for k, v in parser.items(each):
                    d[k.upper()] = v
                    default[k.upper()] = ''
                testenv[each] = d

        testenv['default'] = default

        s = 'measure'
        if parser.has_section(s):
            for o in parser.options(s):
                if o in ('runs'):
                    runs = parser.getint(s, o)
                elif o in ('repeatevery'):
                    repeatevery = parser.getboolean(s, o)
                elif o in ('cutoff'):
                    cutoff = parser.getint(s, o)
                elif o in ('delay'):
                    delay = parser.getfloat(s, o)
                elif o in ('maxtime'):
                    maxtime = parser.getint(s, o)
                elif o in ('logfilemax'):
                    logfilemax = parser.getint(s, o)
                elif o in ('outputmax'):
                    outputmax = parser.getint(s, o)
                elif o in ('affinitymask'):
                    affinitymask = parser.getint(s, o)

    except (NoSectionError, NoOptionError), e:
        if logger: logger.debug(e)
        print e, 'in', realpath(ini)
        sys.exit(2)
コード例 #34
0
    def _config_option(self, config_parser, option, default, getter='get'):
        """Return *option* from [Settings], or store str(*default*) in the
        parser and return *default* when the option is missing. *getter*
        selects the ConfigParser read method ('get'/'getint'/'getboolean').

        NOTE(review): defaults are now evaluated eagerly at the call site;
        the module-level global_* defaults must therefore be defined even
        when the option is already present (they are, at import time).
        """
        if not config_parser.has_option('Settings', option):
            config_parser.set('Settings', option, str(default))
            return default
        return getattr(config_parser, getter)('Settings', option)

    def config_load(self):
        """Load this game's per-game config.ini, backfilling any missing
        options with defaults, and write the completed file back.

        The original repeated the identical has_option/set-default/read
        pattern fifteen times; it is factored into _config_option.
        """
        config_file = self.install_dir + '/' + self.game_name + '/config.ini'
        config_parser = ConfigParser()
        config_parser.read(config_file)

        if not config_parser.has_section('Settings'):
            config_parser.add_section('Settings')

        opt = self._config_option
        self.wine = opt(config_parser, 'wine', 'global')
        self.wine_path = opt(config_parser, 'wine_path', global_wine_path)
        self.wine_version = opt(config_parser, 'wine_version', global_wine_version)
        self.monitor = opt(config_parser, 'monitor', global_monitor, 'getint')
        self.launcher = opt(config_parser, 'launcher', True, 'getboolean')
        self.show_banner = opt(config_parser, 'show_banner', True, 'getboolean')
        self.win_ver = opt(config_parser, 'win_ver', 0, 'getint')
        self.virtual_desktop = opt(config_parser, 'virtual_desktop', False, 'getboolean')
        self.virtual_desktop_width = opt(config_parser, 'virtual_desktop_width', '')
        self.virtual_desktop_height = opt(config_parser, 'virtual_desktop_height', '')
        self.mouse_capture = opt(config_parser, 'mouse_capture', False, 'getboolean')
        self.own_prefix = opt(config_parser, 'own_prefix', False, 'getboolean')
        self.winearch = opt(config_parser, 'winearch', 'win32')
        self.command_before = opt(config_parser, 'command_before', '')
        self.command_after = opt(config_parser, 'command_after', '')

        # Persist any backfilled defaults; `with` closes the file even if
        # write() fails (the original used open/write/close).
        with open(config_file, 'w') as new_config_file:
            config_parser.write(new_config_file)
0
ファイル: params.py プロジェクト: DataForces/CV_LUNA
    def __init__(self, config_file_path):
        """Load all experiment hyper-parameters from the ini file at
        *config_file_path* into UPPER_CASE attributes.

        Also keeps the parser itself on self.CONFIG and backfills
        info/model_id with a timestamp-derived id when absent.
        """
        cf = ConfigParser()
        # ConfigParser.read returns the list of files successfully parsed.
        read_from = cf.read(config_file_path)

        print "Loaded configurations from (in order)", read_from

        self.CONFIG = cf
        cf.set('info','config_file', config_file_path)

        if not cf.has_option('info','model_id'):
            # Derive a unique model id from the current time + run name.
            cf.set('info','model_id', str(int(time.time()))+"_"+cf.get('info','name'))

        # Info
        self.EXPERIMENT = cf.get('info', 'experiment')
        self.NAME = cf.get('info', 'name')
        self.MODEL_ID = cf.get('info', 'model_id')

        # Dataset
        self.PIXELS = cf.getint('dataset','pixels')
        self.CHANNELS = cf.getint('dataset','channels')
        self.N_CLASSES = cf.getint('dataset','n_classes')

        # The literal string 'None' means "use the full dataset".
        self.SUBSET = None if cf.get('dataset','subset')=='None' else cf.getint('dataset','subset')

        self.FILENAMES_TRAIN = cf.get('dataset','filenames_train')
        self.FILENAMES_VALIDATION = cf.get('dataset','filenames_validation')
        self.DATA_FOLDER = cf.get('dataset','data_folder')


        # Network
        self.ARCHITECTURE = cf.get('network', 'architecture')

        # Network - U-net specific
        self.INPUT_SIZE = cf.getint('network', 'input_size')
        self.DEPTH = cf.getint('network', 'depth')
        self.BRANCHING_FACTOR = cf.getint('network', 'branching_factor')
        self.BATCH_NORMALIZATION = cf.getboolean('network', 'batch_normalization')
        self.BATCH_NORMALIZATION_ALPHA = cf.getfloat('network', 'batch_normalization_alpha')
        self.DROPOUT = cf.getfloat('network', 'dropout')
        self.SPATIAL_DROPOUT = cf.getfloat('network', 'spatial_dropout')
        self.GAUSSIAN_NOISE = cf.getfloat('network', 'gaussian_noise')

        # Updates
        self.OPTIMIZATION = cf.get('updates', 'optimization')
        self.LEARNING_RATE = cf.getfloat('updates', 'learning_rate')
        self.MOMENTUM = cf.getfloat('updates', 'momentum')
        self.L2_LAMBDA = cf.getfloat('updates', 'l2_lambda')

        self.BATCH_SIZE_TRAIN = cf.getint('updates', 'batch_size_train')
        self.BATCH_SIZE_VALIDATION = cf.getint('updates', 'batch_size_validation')
        self.N_EPOCHS = cf.getint('updates', 'n_epochs')

        # Normalization
        self.ZERO_CENTER = cf.getboolean('normalization', 'zero_center')
        if self.CHANNELS == 0:
            # Single scalar mean for channel-less input.
            self.MEAN_PIXEL = cf.getfloat('normalization', 'mean_pixel')
        else:
            # One mean per channel. NOTE(review): Python 2 map() returns a
            # list here; under Python 3 this would be a lazy map object.
            self.MEAN_PIXEL = map(float, cf.get('normalization', 'mean_pixel').split())

        # Preprocessing
        self.RANDOM_CROP = cf.getint('preprocessing', 'random_crop')
        self.ERODE_SEGMENTATION = cf.getint('preprocessing', 'erode_segmentation')

        # Augmentation
        self.AUGMENT = cf.getboolean('augmentation', 'augment')
        # zoom/rotation/translation configs give a half-range; the params
        # dict stores symmetric (low, high) ranges around the identity.
        self.AUGMENTATION_PARAMS = {
            'flip': cf.getboolean('augmentation', 'flip'),
            'zoom_range': (1.-cf.getfloat('augmentation', 'zoom'),1.+cf.getfloat('augmentation', 'zoom')),
            'rotation_range': (-cf.getfloat('augmentation', 'rotation'),cf.getfloat('augmentation', 'rotation')),
            'translation_range': (-cf.getfloat('augmentation', 'translation'),cf.getfloat('augmentation', 'translation'))
        }

        # Misc
        self.MULTIPROCESS_LOAD_AUGMENTATION = cf.getboolean('misc', 'multiprocess_load_augmentation')
        self.N_WORKERS_LOAD_AUGMENTATION = cf.getint('misc', 'n_workers_load_augmentation')
        self.SAVE_EVERY_N_EPOCH = cf.getint('misc', 'save_every_n_epoch')
コード例 #36
0
 def getboolean(self, *args, **kwargs):
     """Boolean lookup that returns False instead of raising AttributeError.

     NOTE(review): AttributeError presumably occurs when the stored value
     is not a string -- confirm; malformed boolean strings raise
     ValueError, which is deliberately not handled here.
     """
     try:
         value = ConfigParser.getboolean(self, *args, **kwargs)
     except AttributeError:
         value = False
     return value
コード例 #37
0
ファイル: config.py プロジェクト: liuyongdk/platform
class PlatformUserConfig:
    """Accessors for the platform's user-editable config file.

    Every method re-reads the file before acting so changes by other
    writers are picked up; getters return a default when the section or
    option is absent.
    """

    def __init__(self, config_file):
        self.log = logger.get_logger('PlatformUserConfig')
        self.parser = ConfigParser()
        self.filename = config_file

    def update_redirect(self, domain, api_url):
        """Persist the redirect service domain and API URL."""
        self.parser.read(self.filename)
        self.log.info('setting domain={0}, api_url={1}'.format(
            domain, api_url))

        self.__set('redirect', 'domain', domain)
        self.__set('redirect', 'api_url', api_url)
        self.__save()

    def get_redirect_domain(self):
        """Return the redirect domain, defaulting to 'syncloud.it'."""
        # has_section is checked first: Python 2's has_option raises
        # NoSectionError when the section itself is missing.
        self.parser.read(self.filename)
        if self.parser.has_section('redirect') and self.parser.has_option(
                'redirect', 'domain'):
            return self.parser.get('redirect', 'domain')
        return 'syncloud.it'

    def get_redirect_api_url(self):
        """Return the redirect API URL, defaulting to the public service."""
        self.parser.read(self.filename)
        if self.parser.has_section('redirect') and self.parser.has_option(
                'redirect', 'api_url'):
            return self.parser.get('redirect', 'api_url')
        return 'http://api.syncloud.it'

    def set_user_update_token(self, user_update_token):
        self.parser.read(self.filename)
        self.__set('redirect', 'user_update_token', user_update_token)
        self.__save()

    def get_user_update_token(self):
        self.parser.read(self.filename)
        return self.parser.get('redirect', 'user_update_token')

    def set_user_email(self, user_email):
        self.parser.read(self.filename)
        self.__set('redirect', 'user_email', user_email)
        self.__save()

    def get_user_email(self):
        self.parser.read(self.filename)
        return self.parser.get('redirect', 'user_email')

    def set_custom_domain(self, custom_domain):
        self.parser.read(self.filename)
        self.__set('platform', 'custom_domain', custom_domain)
        self.__save()

    def get_custom_domain(self):
        self.parser.read(self.filename)
        if self.parser.has_option('platform', 'custom_domain'):
            return self.parser.get('platform', 'custom_domain')
        return None

    def get_user_domain(self):
        self.parser.read(self.filename)
        if self.parser.has_option('platform', 'user_domain'):
            return self.parser.get('platform', 'user_domain')
        return None

    def get_domain_update_token(self):
        self.parser.read(self.filename)
        if self.parser.has_option('platform', 'domain_update_token'):
            return self.parser.get('platform', 'domain_update_token')
        return None

    def update_domain(self, user_domain, domain_update_token):
        """Persist the user domain together with its update token."""
        self.parser.read(self.filename)
        # BUG FIX: the second placeholder was {0}, which logged the domain
        # twice and never the token.
        self.log.info(
            'saving user_domain = {0}, domain_update_token = {1}'.format(
                user_domain, domain_update_token))
        self.__set('platform', 'user_domain', user_domain)
        self.__set('platform', 'domain_update_token', domain_update_token)
        self.__save()

    def get_external_access(self):
        """Return the external_access flag (default False)."""
        self.parser.read(self.filename)
        if not self.parser.has_option('platform', 'external_access'):
            return False
        return self.parser.getboolean('platform', 'external_access')

    def is_redirect_enabled(self):
        """Return the redirect_enabled flag (default True)."""
        self.parser.read(self.filename)
        if not self.parser.has_option('platform', 'redirect_enabled'):
            return True
        return self.parser.getboolean('platform', 'redirect_enabled')

    def set_redirect_enabled(self, enabled):
        self.parser.read(self.filename)
        self.__set('platform', 'redirect_enabled', enabled)
        self.__save()

    def update_device_access(self, upnp_enabled, external_access, public_ip,
                             manual_certificate_port, manual_access_port):
        """Persist all access-related settings in a single write."""
        self.parser.read(self.filename)
        self.__set('platform', 'external_access', external_access)
        self.__set('platform', 'upnp', upnp_enabled)
        self.__set('platform', 'public_ip', public_ip)
        self.__set('platform', 'manual_certificate_port',
                   manual_certificate_port)
        self.__set('platform', 'manual_access_port', manual_access_port)
        self.__save()

    def get_upnp(self):
        """Return the upnp flag (default True)."""
        self.parser.read(self.filename)
        if not self.parser.has_option('platform', 'upnp'):
            return True
        return self.parser.getboolean('platform', 'upnp')

    def get_public_ip(self):
        self.parser.read(self.filename)
        if not self.parser.has_option('platform', 'public_ip'):
            return None
        return self.parser.get('platform', 'public_ip')

    def get_manual_certificate_port(self):
        self.parser.read(self.filename)
        if not self.parser.has_option('platform', 'manual_certificate_port'):
            return None
        return self.parser.get('platform', 'manual_certificate_port')

    def get_manual_access_port(self):
        self.parser.read(self.filename)
        if not self.parser.has_option('platform', 'manual_access_port'):
            return None
        return self.parser.get('platform', 'manual_access_port')

    def __set(self, section, key, value):
        # Create the section on demand; a None value removes the key.
        if not self.parser.has_section(section):
            self.parser.add_section(section)
        if value is None:
            self.parser.remove_option(section, key)
        else:
            self.parser.set(section, key, value)

    def __save(self):
        # 'wb' matches Python 2 ConfigParser.write, which emits str.
        with open(self.filename, 'wb') as f:
            self.parser.write(f)
コード例 #38
0
ファイル: config.py プロジェクト: liuyongdk/platform
class PlatformConfig:
    """Read/write access to the platform ini configuration file.

    All values live in the single ``[platform]`` section of the file at
    ``<config_dir>/PLATFORM_CONFIG_NAME``.  The accessors are thin
    wrappers around ``__get``; the setters persist to disk immediately
    via ``__set``.
    """

    def __init__(self, config_dir):
        self.parser = ConfigParser()
        self.filename = join(config_dir, PLATFORM_CONFIG_NAME)
        # Fail fast: this class only edits an existing file, it never
        # creates one.
        if (not isfile(self.filename)):
            raise Exception('platform config does not exist: {0}'.format(
                self.filename))
        self.parser.read(self.filename)

    # --- filesystem layout --------------------------------------------

    def apps_root(self):
        return self.__get('apps_root')

    def data_root(self):
        return self.__get('data_root')

    def configs_root(self):
        return self.__get('configs_root')

    def config_root(self):
        return self.__get('config_root')

    def www_root_internal(self):
        return self.__get('www_root_internal')

    def www_root_public(self):
        return self.__get('www_root_public')

    def app_dir(self):
        return self.__get('app_dir')

    def data_dir(self):
        return self.__get('data_dir')

    def config_dir(self):
        return self.__get('config_dir')

    def bin_dir(self):
        return self.__get('bin_dir')

    def nginx_config_dir(self):
        return self.__get('nginx_config_dir')

    # --- external tools and cron --------------------------------------

    def cron_user(self):
        return self.__get('cron_user')

    def cron_cmd(self):
        return self.__get('cron_cmd')

    def openssl(self):
        return self.__get('openssl')

    def nginx(self):
        return self.__get('nginx')

    def cron_schedule(self):
        return self.__get('cron_schedule')

    # --- mutable settings (setters persist immediately) ---------------

    def get_web_secret_key(self):
        return self.__get('web_secret_key')

    def set_web_secret_key(self, value):
        return self.__set('web_secret_key', value)

    def get_user_config(self):
        return self.__get('user_config')

    def get_log_root(self):
        return self.__get('log_root')

    def get_log_sender_pattern(self):
        return self.__get('log_sender_pattern')

    def get_internal_disk_dir(self):
        return self.__get('internal_disk_dir')

    def get_external_disk_dir(self):
        return self.__get('external_disk_dir')

    def get_disk_link(self):
        return self.__get('disk_link')

    def get_disk_root(self):
        return self.__get('disk_root')

    def get_ssh_port(self):
        return self.__get('ssh_port')

    def set_ssh_port(self, value):
        return self.__set('ssh_port', value)

    def get_rest_internal_log(self):
        return self.__get('rest_internal_log')

    def get_rest_public_log(self):
        return self.__get('rest_public_log')

    # --- TLS material -------------------------------------------------

    def get_ssl_certificate_file(self):
        return self.__get('ssl_certificate_file')

    def get_ssl_ca_certificate_file(self):
        return self.__get('ssl_ca_certificate_file')

    def get_ssl_ca_serial_file(self):
        return self.__get('ssl_ca_serial_file')

    def get_ssl_certificate_request_file(self):
        return self.__get('ssl_certificate_request_file')

    def get_default_ssl_certificate_file(self):
        return self.__get('default_ssl_certificate_file')

    def get_ssl_key_file(self):
        return self.__get('ssl_key_file')

    def get_ssl_ca_key_file(self):
        return self.__get('ssl_ca_key_file')

    def get_default_ssl_key_file(self):
        return self.__get('default_ssl_key_file')

    def get_openssl_config(self):
        return self.__get('openssl_config')

    # --- misc ----------------------------------------------------------

    def get_platform_log(self):
        return self.__get('platform_log')

    def get_installer(self):
        return self.__get('installer')

    def get_hooks_root(self):
        return self.__get('hooks_root')

    def is_certbot_test_cert(self):
        # Unlike __get this raises NoOptionError when the key is absent.
        return self.parser.getboolean('platform', 'certbot_test_cert')

    def get_boot_extend_script(self):
        return self.__get('boot_extend_script')

    def __get(self, key):
        """Return *key* from the [platform] section (raises if missing)."""
        return self.parser.get('platform', key)

    def __set(self, key, value):
        """Set *key* in the [platform] section and persist to disk.

        BUGFIX: ConfigParser.write() expects a text-mode file object, so
        open with 'w' -- the previous 'wb' raises TypeError on Python 3.
        """
        self.parser.set('platform', key, value)
        with open(self.filename, 'w') as f:
            self.parser.write(f)
コード例 #39
0
ファイル: _config.py プロジェクト: darmis007/myokit
def _load():
    """
    Reads the configuration file and attempts to set the library paths.
    """
    # The configuration lives in the user's myokit directory.
    path = os.path.join(myokit.DIR_USER, 'myokit.ini')

    # Write a default file first if none exists yet.
    if not os.path.isfile(path):
        _create(path)

    # Case-sensitive parser (unix paths!) that tolerates valueless keys.
    config = ConfigParser(allow_no_value=True)
    config.optionxform = str
    config.read(path)

    # Date format
    if config.has_option('time', 'date_format'):
        value = config.get('time', 'date_format')
        if value:
            myokit.DATE_FORMAT = str(value)

    # Time format
    if config.has_option('time', 'time_format'):
        value = config.get('time', 'time_format')
        if value:
            myokit.TIME_FORMAT = str(value)

    # Line numbers in simulation debug output
    if config.has_option('debug', 'line_numbers'):
        try:
            myokit.DEBUG_LINE_NUMBERS = config.getboolean(
                'debug', 'line_numbers')
        except ValueError:  # pragma: no cover
            pass

    # GUI backend: map the configured name onto the four FORCE_* flags.
    # Empty or unrecognised values deliberately leave the settings alone.
    if config.has_option('gui', 'backend'):
        backend = config.get('gui', 'backend').strip().lower()
        flag_sets = {
            'pyqt': (True, False, False, False),
            'pyqt4': (True, False, False, False),
            'pyqt5': (False, True, False, False),
            'pyside': (False, False, True, False),
            'pyside2': (False, False, False, True),
        }
        if backend in flag_sets:
            (myokit.FORCE_PYQT4, myokit.FORCE_PYQT5,
             myokit.FORCE_PYSIDE, myokit.FORCE_PYSIDE2) = flag_sets[backend]

    # Sundials libraries, header files, and version
    if config.has_option('sundials', 'lib'):
        myokit.SUNDIALS_LIB.extend(
            part.strip() for part in config.get('sundials', 'lib').split(';'))
    if config.has_option('sundials', 'inc'):
        myokit.SUNDIALS_INC.extend(
            part.strip() for part in config.get('sundials', 'inc').split(';'))
    if config.has_option('sundials', 'version'):
        try:
            myokit.SUNDIALS_VERSION = int(config.get('sundials', 'version'))
        except ValueError:  # pragma: no cover
            pass

    # Dynamically add embedded sundials paths for windows
    if platform.system() == 'Windows':  # pragma: no linux cover
        _dynamically_add_embedded_sundials_win()

    # If needed, attempt auto-detection of the Sundials version
    if myokit.SUNDIALS_VERSION == 0:  # pragma: no cover
        detected = myokit.Sundials.version_int()
        log = logging.getLogger(__name__)
        if detected is None:
            log.warning('Sundials version not set in myokit.ini and version'
                        ' auto-detection failed.')
        else:
            myokit.SUNDIALS_VERSION = detected
            log.warning(
                'Sundials version not set in myokit.ini. Continuing with'
                ' detected version (' + str(detected) + '). For a tiny'
                ' performance boost, please set this version in ' + path)

    # OpenCL libraries and header files
    if config.has_option('opencl', 'lib'):
        myokit.OPENCL_LIB.extend(
            part.strip() for part in config.get('opencl', 'lib').split(';'))
    if config.has_option('opencl', 'inc'):
        myokit.OPENCL_INC.extend(
            part.strip() for part in config.get('opencl', 'inc').split(';'))
コード例 #40
0
def create_config(config_path):
    """Parse the default config and *config_path* into a settings dict."""
    config_raw = ConfigParser()
    # Later reads override earlier ones, so the defaults load first.
    config_raw.read(DEFAULT_CONFIG)
    config_raw.read(config_path)

    get = config_raw.get
    getint = config_raw.getint
    getbool = config_raw.getboolean
    getfloat = config_raw.getfloat

    config = {
        # Nest detection thresholds
        'timespan': getint('Nest Config', 'TIMESPAN_SINCE_CHANGE'),
        'min_pokemon': getint('Nest Config', 'MIN_POKEMON_NEST_COUNT'),
        'min_spawn': getint('Nest Config', 'MIN_SPAWNPOINT_NEST_COUNT'),
        'delete_old': getbool('Nest Config', 'DELETE_OLD_NESTS'),
        'event_poke': json.loads(get('Nest Config', 'EVENT_POKEMON')),
        'pokestop_pokemon': getbool('Nest Config', 'POKESTOP_POKEMON'),
        # Scan area
        'area_name': get('Area', 'NAME'),
        'scan_hours': getfloat('Area', 'SCAN_HOURS_PER_DAY'),
        'p1_lat': getfloat('Area', 'POINT1_LAT'),
        'p1_lon': getfloat('Area', 'POINT1_LON'),
        'p2_lat': getfloat('Area', 'POINT2_LAT'),
        'p2_lon': getfloat('Area', 'POINT2_LON'),
        # Source database
        'db_r_host': get('DB Read', 'HOST'),
        'db_r_name': get('DB Read', 'NAME'),
        'db_r_user': get('DB Read', 'USER'),
        'db_r_pass': get('DB Read', 'PASSWORD'),
        'db_r_port': getint('DB Read', 'PORT'),
        'db_r_charset': get('DB Read', 'CHARSET'),
        'db_pokemon': get('DB Read', 'TABLE_POKEMON'),
        'db_pokemon_spawnid': get('DB Read', 'TABLE_POKEMON_SPAWNID'),
        'db_pokemon_timestamp': get('DB Read', 'TABLE_POKEMON_TIMESTAMP'),
        'db_pokestop': get('DB Read', 'TABLE_POKESTOP'),
        'db_spawnpoint': get('DB Read', 'TABLE_SPAWNPOINT'),
        'db_spawnpoint_id': get('DB Read', 'TABLE_SPAWNPOINT_ID'),
        'db_spawnpoint_lat': get('DB Read', 'TABLE_SPAWNPOINT_LAT'),
        'db_spawnpoint_lon': get('DB Read', 'TABLE_SPAWNPOINT_LON'),
        'use_unix_timestamp': getbool('DB Read', 'USE_UNIX_TIMESTAMP'),
        # Destination database
        'db_w_host': get('DB Write', 'HOST'),
        'db_w_name': get('DB Write', 'NAME'),
        'db_w_user': get('DB Write', 'USER'),
        'db_w_pass': get('DB Write', 'PASSWORD'),
        'db_w_port': getint('DB Write', 'PORT'),
        'db_w_charset': get('DB Write', 'CHARSET'),
        'db_nest': get('DB Write', 'TABLE_NESTS'),
        # Geojson output
        'save_path': get('Geojson', 'SAVE_PATH'),
        'geojson_extend': getbool('Geojson', 'GEOJSON_EXTEND'),
        'default_park_name': get('Geojson', 'DEFAULT_PARK_NAME'),
        'json-stroke': get('Geojson', 'STROKE'),
        'json-stroke-width': getfloat('Geojson', 'STROKE-WIDTH'),
        'json-stroke-opacity': getfloat('Geojson', 'STROKE-OPACITY'),
        'json-fill': get('Geojson', 'FILL'),
        'json-fill-opacity': getfloat('Geojson', 'FILL-OPACITY'),
        # Misc
        'encoding': get('Other', 'ENCODING'),
        'verbose': getbool('Other', 'VERBOSE'),
        'osm_date': get('Other', 'OSM_DATE'),
    }

    return config
コード例 #41
0
ファイル: run_clean.py プロジェクト: folguinch/GoContinuum
def main():
    """Run tclean for each input measurement set.

    Reads tclean parameters from a configuration-file section and images
    either all spws combined, a single requested spw, or every spw in
    turn, writing the images under the given output directory.
    """
    # Command line options
    parser = argparse.ArgumentParser()
    parser.add_argument('-c', nargs=1, 
            help='Casa parameter.')
    group = parser.add_mutually_exclusive_group(required=False)
    group.add_argument('--all_spws', action='store_true', 
            help='Combine all spectral windows')
    group.add_argument('--spw', type=str, nargs=1, default=None,
            help='Value for tclean spw')
    parser.add_argument('--section', nargs=1, type=str, default=['dirty'],
            help='Configuration section name')
    parser.add_argument('configfile', nargs=1, type=str,
            help='Configuration file name')
    parser.add_argument('outputdir', nargs=1, type=str,
            help='Output directory')
    parser.add_argument('uvdata', nargs='*', type=str,
            help='UV data to extract dirty images')
    args = parser.parse_args()

    # Configuration file with tclean defaults.
    # BUGFIX: the default key was misspelled 'chancunks', so the -1
    # default (declared in int_keys below as 'chanchunks') was never
    # picked up and never passed to tclean.
    config = ConfigParser({'robust':'0.5', 'deconvolver':'hogbom',
        'specmode':'cube', 'outframe':'LSRK', 'gridder':'standard',
        'interactive':'False', 'weighting':'briggs', 'niter':'0',
        'chanchunks':'-1'})
    config.read(args.configfile[0])
    section = args.section[0]

    # Check cell size and imsize are in the config
    if 'cell' not in config.options(section):
        raise KeyError('Missing cell in configuration')
    if 'imsize' not in config.options(section):
        raise KeyError('Missing imsize in configuration')

    # Common arguments, add as needed; these drive the type conversion
    # from the (string-only) config file into tclean's expected types.
    float_keys = ['robust', 'pblimit', 'pbmask'] 
    int_keys = ['niter', 'chanchunks']
    bool_keys = ['interactive', 'parallel', 'pbcor']
    ignore_keys = ['vis', 'imagename', 'spw']
    tclean_pars = {}
    for key in tclean.parameters.keys():
        if key not in config.options(section) or key in ignore_keys:
            continue
        #Check for type:
        if key in float_keys:
            tclean_pars[key] = config.getfloat(section, key)
        elif key in int_keys:
            tclean_pars[key] = config.getint(section, key)
        elif key in bool_keys:
            tclean_pars[key] = config.getboolean(section, key)
        elif key=='imsize':
            tclean_pars[key] = map(int, config.get(section, key).split())
        else:
            tclean_pars[key] = str(config.get(section, key))
    casalog.post('tclean non-default parameters: %r' % tclean_pars)

    for ms in args.uvdata:
        # Number of spws
        nspws = len(vishead(vis=ms, mode='list')['spw_name'][0])
        casalog.post('Processing ms: %s' % ms)
        casalog.post('Number of spws in ms %s: %i' % (ms, nspws))

        # Extract properties from ms file name
        msname = os.path.basename(ms.strip('/'))
        if msname.endswith('.ms'):
            msname, ext = os.path.splitext(msname)
        elif '.ms.' in msname:
            msname = msname.replace('.ms.','.')
        else:
            pass

        # Cases:
        # Combine spws or compute just for specific spw
        if args.all_spws or 'spw' in config.options(section) or \
                args.spw is not None:
            spw = ','.join(map(str,range(nspws)))
            if args.spw:
                spw = args.spw[0]
                imagename = '{0}/{1}.spw{2}.robust{3}'.format(args.outputdir[0],
                        msname, spw, tclean_pars['robust'])
            elif 'spw' in config.options(section) and \
                    config.get(section,'spw')!=spw:
                # BUGFIX: read the configured spw VALUE; the old code
                # assigned config.options(section) -- the list of all
                # option names -- to spw.
                spw = config.get(section, 'spw')
                imagename = '{0}/{1}.spw{2}.robust{3}'.format(args.outputdir[0],
                        msname, spw, tclean_pars['robust'])
            else:
                imagename = '{0}/{1}.robust{2}'.format(args.outputdir[0],
                        msname, tclean_pars['robust'])
            run_tclean(vis=ms, spw=spw, imagename=imagename,
                    **tclean_pars)
        else:
            # All spectral windows one by one
            for spw in range(nspws):
                imagename = '{0}/{1}.spw{2}.robust{3}'.format(args.outputdir[0],
                        msname, spw, tclean_pars['robust'])
                run_tclean(vis=ms, spw='%i' % spw,
                        imagename=imagename, **tclean_pars)
コード例 #42
0
    def config_load(self):
        """Read (and backfill) this game's settings from its config.ini.

        Options missing from the file are initialised from global
        defaults and written back, so the file always ends up fully
        populated after a call.
        """
        config_file = self.install_dir + '/' + self.game_name + '/config.ini'
        config_parser = ConfigParser()
        config_parser.read(config_file)

        if not config_parser.has_section('Settings'):
            config_parser.add_section('Settings')

        def load_option(name, default, getter):
            # Return the stored value, or store and return the default.
            if config_parser.has_option('Settings', name):
                return getter('Settings', name)
            config_parser.set('Settings', name, str(default))
            return default

        self.dosbox = load_option('dosbox', 'global', config_parser.get)
        self.dosbox_path = load_option(
            'dosbox_path', global_dosbox_path, config_parser.get)
        self.dosbox_version = load_option(
            'dosbox_version', global_dosbox_version, config_parser.get)
        self.own_dosbox_mapperfile = load_option(
            'own_dosbox_mapperfile', False, config_parser.getboolean)
        self.monitor = load_option(
            'monitor', global_monitor, config_parser.getint)
        self.launcher = load_option(
            'launcher', True, config_parser.getboolean)
        self.show_banner = load_option(
            'show_banner', True, config_parser.getboolean)
        self.command_before = load_option(
            'command_before', '', config_parser.get)
        self.command_after = load_option(
            'command_after', '', config_parser.get)

        with open(config_file, 'w') as new_config_file:
            config_parser.write(new_config_file)
コード例 #43
0
# Log destination: fall back to the default path when the config option
# is missing. BUGFIX: narrowed from BaseException to Exception so that
# KeyboardInterrupt/SystemExit are no longer swallowed here.
try:
    logfile = config.get('IRC', 'log')
except Exception:
    logfile = '/var/log/turing.log'

loglevel = logging.INFO
logformat = '%(asctime)s - %(levelname)s - %(message)s'
logging.basicConfig(filename=logfile, format=logformat, level=loglevel)

####################################################
#             Set IRC connection values            #
####################################################

try:
    tls = config.getboolean('IRC', 'tls')
    port = config.getint('IRC', 'port')
    quake = config.getboolean('Features', 'quake')
    server = config.get('IRC', 'host')
    strava = config.getboolean('Features', 'strava')
    channel = config.get('IRC', 'channel')
    botnick = config.get('IRC', 'nick')
    passreq = config.getboolean('IRC', 'passreq')
    oxford_id = config.get('API Keys', 'oxford_id')
    oxford_key = config.get('API Keys', 'oxford_key')
    mag_thresh = config.getint('Earthquake', 'mag_thresh')
    weather_key = config.get('API Keys', 'weather_key')
    if strava:
        strava_key = config.get('API Keys', 'strava_key')
    if passreq:
        password = config.get('IRC', 'password')
コード例 #44
0
ファイル: ctest.py プロジェクト: PhelanWang/CloudProject
class SwitchAgent:
    """Flask-based agent that registers itself with a remote switch
    server and exposes task/report/global-variable helpers.

    NOTE(review): Python 2 code (print statements); relies on several
    module-level names (ConfigParser, Flask, Api, connection, post_url,
    TaskRequest, ...) defined elsewhere in this module.
    """

    def __init__(self, module_name):
        # These module-level globals are (re)initialised from the config
        # file on construction.
        global agent_version
        global remote_base_url
        global server_port
        self.module_name = module_name
        self.modules = None          # optional list of module paths from config
        self.local_key = None        # key used for local storage lookups
        self.agent_port = None       # set later, in run()
        self.debug_mode = True
        self.multi_thread = False
        self.services_online = []    # services registered via entry()
        try:
            config_name = '%s.conf' % __name__
            self.config = ConfigParser()
            self.config.read(config_name)
            # NOTE(review): the stock ConfigParser.get() has no
            # positional default argument (its 3rd positional parameter
            # is `raw`) -- presumably ConfigParser here is a customised
            # parser supporting get(section, option, default); confirm.
            server_port = int(self.config.get('network', 'server-port',
                                              '5000'))
            db_filename = self.config.get('database', 'file', ':memory:')
            agent_version = self.config.get('system', 'version', '1.0.3')
            #remote base url
            remote_base_url = self.config.get('network', 'server-base',
                                              'http://localhost:5000')
            self.local_key = self.config.get('module', 'local-key',
                                             'agent_path')
            paths = self.config.get('module', 'path', '')
            if paths:
                self.modules = str(paths).split(';')
                # prepare the local database (in-memory by default)
            connection(db_filename).prepare()
            self.app = Flask(self.module_name)
            # register_api_resources(Api(self.app), API_MAP, '/switch/agent/')
            register_api_resources(Api(self.app), API_MAP, '')
        except Exception as e:
            print 'Error on initialization.\nPlease check if the config file name is "%s"' % config_name
            print_exception(__name__, e)

        return

    def post_ip(self):
        # Report this agent's IP and port to the remote server.
        payload = {'ip': getip(), 'port': self.agent_port, 'type': 0}
        post_url('%s/switch/ip' % remote_base_url, payload)

    # Register node information with the server.
    def post_host_info(self):
        from host_info import get_sytem_info
        print '%s/nodeRegister' % remote_base_url
        post_url('%s/nodeRegister' % remote_base_url, payload=get_sytem_info())

    def entry(self, service_name, version='1.0.1'):
        """Decorator factory: register the wrapped function as the
        handler for *service_name*/*version* and remember it locally."""
        def register_entry(F):
            try:
                # print service_name, version,'a'
                service = service_name + version
                register_service(service_name, version)
                server_entry[service] = F
                self.services_online.append({
                    'name': service_name,
                    'version': version
                })
            except:
                # Best effort: a failed registration must not break the
                # decorated function itself.
                pass
            return F

        return register_entry

    @staticmethod
    def instrusive(F):
        # Register F in the intrusive-entry table under its own name.
        try:
            function_name = F.__name__
            instrusive_entry[function_name] = F
        except:
            pass
        return F

    def tell_online_services(self):
        # Announce every locally registered service to the server.
        for service in self.services_online:
            TaskRequest.post_servtag(service['name'], service['version'],
                                     self.agent_port)

    @staticmethod
    def post_report(subtask_id,
                    severity,
                    result,
                    brief,
                    detail,
                    json_data=None):
        # Forward a subtask report; status updates are intentionally
        # disabled (see commented-out calls).
        # global TASK_STATUS
        # TaskRequest.set_subtask_status(subtask_id, TASK_STATUS['done'])
        # TaskRequest.put_subtask_status(subtask_id, TASK_STATUS['done'])
        TaskRequest.post_report(subtask_id, severity, result, brief, detail,
                                json_data)

    @staticmethod
    def post_failure(subtask_id):
        # Mark a subtask as failed both locally and remotely.
        TaskRequest.set_subtask_status(subtask_id, TASK_STATUS['fail'])
        TaskRequest.put_subtask_status(subtask_id, TASK_STATUS['fail'])

    @staticmethod
    def get_global(global_key):
        # Read a server-side global variable; None when unavailable.
        var = global_wrapper(remote_base_url, global_key)
        if var:
            return var.value
        return

    @staticmethod
    def set_global(global_key, global_value):
        # Write a server-side global variable; None when unavailable.
        var = global_wrapper(remote_base_url, global_key)
        if var:
            var.value = global_value
            return var.write()
        return

    @staticmethod
    def global_wrapper(global_key):
        return global_wrapper(remote_base_url, global_key)

    @staticmethod
    def get_local(local_key):
        # Read a value from the local key/value table.
        with connection.connect_db_row() as (db):
            result = db.query("SELECT value FROM local_table WHERE key='%s'" %
                              local_key)
            if result:
                return result[0]['value']
            return
        return

    @staticmethod
    def set_local(local_key, local_value):
        # Upsert a value into the local key/value table.
        with connection.connect_db() as (db):
            result = db.query("SELECT key FROM local_table WHERE key='%s'" %
                              local_key)
            if result:
                db.execute_and_commit(
                    ['UPDATE local_table SET value=? WHERE key=?'],
                    [[(local_value, local_key)]])
            else:
                db.execute_and_commit(['INSERT INTO local_table VALUES(?, ?)'],
                                      [[(local_key, local_value)]])
            return True
        return False

    def run(self):
        """Read runtime options, register with the server, start Flask."""
        try:
            self.agent_port = int(
                self.config.get('network', 'agent-port', '9099'))
            self.debug_mode = self.config.getboolean('system', 'debug')
            self.multi_thread = self.config.getboolean('system',
                                                       'multi-thread')
            # No longer need to register remote methods
            # self.tell_online_services()

            # Temporarily unused
            # self.post_ip()
            # self.post_host_info()
            result = post_url('%s/openStack/initRegister' % remote_base_url,
                              payload=get_nfs_disk())
            post_url('%s/openStack/nodeRegister' % remote_base_url,
                     payload=get_sytem_info())
            print('register result: ', result)
            self.app.run(host='0.0.0.0',
                         port=self.agent_port,
                         debug=self.debug_mode,
                         use_reloader=self.debug_mode,
                         threaded=self.multi_thread)
        except Exception as e:
            print_exception(__name__, e)
コード例 #45
0
gi.require_version('Gtk', '3.0')
from gi.repository import Gtk, Gdk, GdkPixbuf
import gettext

# ConfigParser was renamed in Python 3; support both interpreters.
try:
    from ConfigParser import ConfigParser as ConfigParser
except:
    from configparser import ConfigParser as ConfigParser

from modules import monitors, paths

# Load the global application config and apply the user's visual
# preferences (GTK theme, dark mode, icon theme, font) screen-wide.
global_config_file = os.getenv('HOME') + '/.games_nebula/config/config.ini'
global_config_parser = ConfigParser()
global_config_parser.read(global_config_file)
gtk_theme = global_config_parser.get('visuals', 'gtk_theme')
gtk_dark = global_config_parser.getboolean('visuals', 'gtk_dark')
icon_theme = global_config_parser.get('visuals', 'icon_theme')
font = global_config_parser.get('visuals','font')
screen = Gdk.Screen.get_default()
gsettings = Gtk.Settings.get_for_screen(screen)
gsettings.set_property('gtk-theme-name', gtk_theme)
gsettings.set_property('gtk-application-prefer-dark-theme', gtk_dark)
gsettings.set_property('gtk-icon-theme-name', icon_theme)
gsettings.set_property('gtk-font-name', font)

# Module-level emulation defaults consumed elsewhere in this module.
global_wine =  global_config_parser.get('emulation settings', 'wine')
global_wine_path = global_config_parser.get('emulation settings', 'wine_path')
global_wine_version = global_config_parser.get('emulation settings', 'wine_version')
global_monitor = global_config_parser.getint('emulation settings', 'monitor')

goglib_install_dir = global_config_parser.get('goglib preferences', 'goglib_install_dir')
コード例 #46
0
class Config(object):
    """Ini-file backed configuration store guarded by a file lock.

    Missing sections/options are backfilled with defaults on first load
    and written back to disk.
    """

    def __init__(self, inifile='data/config.ini'):
        self.inifile = inifile
        self.cfg = ConfigParser()

        with FileLock(self.inifile):
            if os.path.exists(inifile):
                self.cfg.read(inifile)

            # initialize configurations
            default_configs = {
                'server': {
                    'ip': '*',
                    'port': '8888',
                    'lastcheckupdate': 0,
                    'updateinfo': ''
                },
                'auth': {
                    'username': '******',
                    'password': '',  # empty password never validated
                    'passwordcheck': 'on',
                    'accesskey': '',  # empty access key never validated
                    'accesskeyenable': 'off',
                },
                'runtime': {
                    'mode': '',
                    'loginlock': 'off',
                    'loginfails': 0,
                    'loginlockexpire': 0,
                },
                'file': {
                    'lastdir': '/root',
                    'lastfile': '',
                }
            }
            # Backfill missing sections/options. items() replaces the
            # Python-2-only iteritems(); str(val) is required because
            # ConfigParser.set() rejects non-string values on Python 3
            # (several defaults above are ints).
            needupdate = False
            for sec, secdata in default_configs.items():
                if not self.cfg.has_section(sec):
                    self.cfg.add_section(sec)
                    needupdate = True
                for opt, val in secdata.items():
                    if not self.cfg.has_option(sec, opt):
                        self.cfg.set(sec, opt, str(val))
                        needupdate = True

            # update ini file
            if needupdate:
                self.update(False)

    def update(self, lock=True):
        """Write the in-memory configuration back to disk.

        When *lock* is true the ini file is locked for the duration of
        the write. Returns True on success, False on any error.
        """
        if lock:
            flock = FileLock(self.inifile)
            flock.acquire()

        try:
            inifp = open(self.inifile, 'w')
            self.cfg.write(inifp)
            inifp.close()
            if lock:
                flock.release()
            return True
        except Exception:
            # Narrowed from a bare except so SystemExit/KeyboardInterrupt
            # propagate; any I/O failure still yields False.
            if lock:
                flock.release()
            return False

    def has_option(self, section, option):
        return self.cfg.has_option(section, option)

    def get(self, section, option):
        return self.cfg.get(section, option)

    def getboolean(self, section, option):
        return self.cfg.getboolean(section, option)

    def getint(self, section, option):
        return self.cfg.getint(section, option)

    def has_section(self, section):
        return self.cfg.has_section(section)

    def add_section(self, section):
        return self.cfg.add_section(section)

    def remove_option(self, section, option):
        # BUGFIX: ConfigParser.remove_option() requires the option name;
        # the old one-argument wrapper always raised TypeError when used.
        return self.cfg.remove_option(section, option)

    def set(self, section, option, value):
        """Set an option and persist; returns False on failure."""
        try:
            self.cfg.set(section, option, value)
        except Exception:
            return False
        return self.update()
コード例 #47
0
            config.write(fobj)
    else:
        config.read(configfile)

    # We need to translate the logging levels from the string thats in the
    # config file to something that the logging module understands.
    log_levels = {
        'debug': logging.DEBUG,
        'info': logging.INFO,
        'warn': logging.WARN,
        'error': logging.ERROR,
    }

    # Lets setup the logging file handler so that we can actually output a log
    # file.
    fh = logging.FileHandler('scan_downloads.log')
    fh.setFormatter(
        logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - %(message)s'))
    fh.setLevel(log_levels[config.get('Logging', 'level')])
    logger.addHandler(fh)

    # Now lets actually connect to the SecurityCenter instance and then pass the
    # sc object off to the scan downloader function.
    sc = SecurityCenter5(config.get('SecurityCenter', 'host'))
    sc.login(config.get('SecurityCenter', 'user'),
             b64decode(config.get('SecurityCenter', 'pass')))
    download_scans(sc,
                   age=config.getint('ScanResults', 'age'),
                   unzip=config.getboolean('ScanResults', 'unzip'),
                   path=config.get('ScanResults', 'path'))
コード例 #48
0
        # the hash file is '~/.config/Voicely/hash'
        # the config file is '~/.config/Voicely/Voicely.conf'

        hf = config_dir + 'hash'
        cf = config_dir + config_file

        hash = md5.new()
        hf_data = open(hf).read() if os.path.exists(hf) else ""
        hash.update(open(cf).read())

        a = ConfigParser()
        a.read(cf)

        config_data = None

        if a.getboolean('__settings__', 'use_keyphrase'):
            ph = a.get('__settings__', 'keyphrase').upper()
            config_data = {(ph + " " + b.upper() if b != '__settings__' else b): dict(a.items(b)) for b in a.sections()}

            if c_dbg_flag:
                for k in config_data:
                    print k, config_data[k]

        else:
            config_data = {b.upper(): dict(a.items(b)) for b in a.sections()}

        if(hash.hexdigest() != hf_data):
            print __hash_not_found__
            # this is the recalculate language file case
            corpus(config_data)
コード例 #49
0
def get_config():
    """
    Read the downloader configuration.

    Looks for a host-specific "<hostname>_config.conf" first and falls
    back to the default "config.conf" in the current directory.

    Returns
    -------
    list
        [outdir, url, subset, params] where
        * outdir (str): output directory from [main],
        * url (str): base url from [main],
        * subset (dict or None): W/E/S/N floats when [subset] use = true,
        * params (dict): "<name>:<level>" strings keyed by the identifier
          taken from each "[param xxx]" section.

    Raises
    ------
    Exception
        If neither a host-specific nor the default config file exists.
    """
    import os
    import sys
    import socket
    from re import match
    if sys.version_info.major < 3:
        from ConfigParser import ConfigParser
    else:
        from configparser import ConfigParser
    CNF = ConfigParser()

    # Prefer a hostname-specific configuration when one is present.
    host = socket.gethostname()
    host_cfg = "{:s}_config.conf".format(host)
    if os.path.isfile(host_cfg):
        log.info("Reading custom config file for \"{:s}\"".format(host))
        CNF.read(host_cfg)
    else:
        log.info("Reading default config.conf file")
        if not os.path.isfile("config.conf"):
            raise Exception("Cannot find config file config.conf")
        CNF.read("config.conf")

    # Output directory and base url.
    outdir = CNF.get("main", "outdir")
    url = CNF.get("main", "url")

    # Optional geographic subset ([subset] section); None when disabled.
    subset = None
    if CNF.getboolean("subset", "use"):
        subset = {side: CNF.getfloat("subset", side)
                  for side in ("W", "E", "S", "N")}

    # Parameter sections of the form "[param xxx]":
    #   xxx:   identifier, used only as the dictionary key
    #   name:  variable name according to the grib index file
    #   level: level of the variable according to the grib index file
    # Sections missing "name" or "level" are skipped silently.
    params = dict()
    for sec in CNF.sections():
        mtch = match(r"^param\s+(.*)$", sec)
        if not mtch:
            continue
        items = dict(CNF.items(sec))
        if "name" not in items or "level" not in items:
            continue
        params[mtch.group(1)] = "{:s}:{:s}".format(items["name"],
                                                   items["level"])
        log.info("Found parameter specification for {:s}".format(
            params[mtch.group(1)]))

    return [outdir, url, subset, params]
コード例 #50
0
class Adm6ConfigParser(ConfigParser):
    """
    Read the global adm6 configuration from ``$HOME/<cfg_file>``.

    The parsed content is held in ``self.cfp`` (a second ConfigParser
    instance); the accessors below wrap the ``[global]`` section and the
    per-device ``[device#<name>]`` sections.
    """

    def __init__(self, cfg_file):
        """Read the config file; raise ValueError if it cannot be read.

        :param cfg_file: file name, relative to the user's $HOME
        """
        ConfigParser.__init__(self)
        self.homedir = os.getenv("HOME")
        self.filename = os.path.join(self.homedir, cfg_file)
        self.cfp = ConfigParser()
        msg = "File not found: %s" % (self.filename)
        try:
            # Probe readability up front so a missing/unreadable file is
            # reported as ValueError instead of being silently ignored
            # by ConfigParser.read().
            with open(self.filename, 'r'):
                pass
        except IOError:
            raise ValueError(msg)
        self.cfp.read([self.filename])

    def get_show_cf(self):
        """Return the complete configuration as printable text,
        one section (name plus its items) per line."""
        retstr = ""
        for section in self.cfp.sections():
            retstr += str(section)
            retstr += str(self.cfp.items(section)) + '\n'
        return retstr

    def get_adm6_home(self):
        """Return adm6 homedir as read from config-file."""
        return self.cfp.get('global', 'home')

    def get_adm6_debuglevel(self):
        """Return the application-wide debuglevel as int."""
        return int(self.cfp.get('global', 'debuglevel'))

    def set_adm6_debuglevel(self, level):
        """Set the application-wide debuglevel and persist the file."""
        self.cfp.set('global', 'debuglevel', str(level))
        # Text mode: ConfigParser.write() emits str, not bytes.
        with open(self.filename, 'w') as configfile:
            self.cfp.write(configfile)
        return True

    def dec_adm6_debuglevel(self):
        """Decrement debuglevel by one, never going below zero."""
        level = self.get_adm6_debuglevel() - 1
        if level < 0:
            level = 0
        self.set_adm6_debuglevel(level)
        return True

    def inc_adm6_debuglevel(self):
        """Increment debuglevel by one."""
        self.set_adm6_debuglevel(self.get_adm6_debuglevel() + 1)
        return True

    def get_apply(self, device):
        """Return the 'active' flag of the named device section.

        NOTE(review): the original docstring claimed a missing flag means
        True, but a missing option raises NoOptionError here — confirm
        which behaviour callers rely on before changing it.
        """
        section = "device#" + device.strip()
        return self.cfp.getboolean(section, 'active')

    def get_version(self):
        """Return the version string read from the config file."""
        return self.cfp.get('global', 'version')

    def get_key_filename(self):
        """Return the ssh key_file path read from the config file."""
        return self.cfp.get('global', 'key_file')

    def get_devices(self):
        """Return the device list (comma separated string) from [global]."""
        return self.cfp.get('global', 'devices')

    def get_software(self):
        """Return the os-software list from [global]."""
        return self.cfp.get('global', 'software')

    def get_device_home(self, device):
        """Return the directory of a device as full pathname.

        NOTE(review): no separator is inserted between home and 'desc/';
        the configured home value apparently must end with '/'.
        """
        pat = self.get_adm6_home()
        pat = pat.strip() + 'desc/' + device.strip()
        return pat

    def get_desc(self, device):
        """Return the description of the named device."""
        section = "device#" + device.strip()
        return self.cfp.get(section, 'desc')

    def get_os(self, device):
        """Return the OS string of the named device."""
        section = "device#" + device.strip()
        return self.cfp.get(section, 'os')

    def get_ip(self, device):
        """Return the IP of the named device."""
        section = "device#" + device
        return self.cfp.get(section, 'ip')

    def get_fwd(self, device):
        """Return the forwarding flag (False: no IPv6 forwarding).

        A missing section or option defaults to False.
        """
        section = "device#" + device.strip()
        if self.cfp.has_section(section) and \
                self.cfp.has_option(section, 'fwd'):
            # getboolean accepts 1/0/yes/no/true/false; the former
            # string comparison "fwd > 0" was always True on Python 2
            # (and a TypeError on Python 3).
            return self.cfp.getboolean(section, 'fwd')
        return False

    def get_asym(self, device):
        """Return the asymmetric-routing flag.

        True means the device does asymmetric IPv6 routing;
        asymmetric = 1 forces stateful to off.
        A missing section or option defaults to False.
        """
        section = "device#" + device.strip()
        if self.cfp.has_section(section) and \
                self.cfp.has_option(section, 'asymmetric'):
            return self.cfp.getboolean(section, 'asymmetric')
        return False

    def print_head(self, device):
        """
        Return a nice header block for the named device section.
        """
        msg = "#" * 80
        msg += '\n'
        msg += self.nice_print('#', '')
        msg += self.nice_print("# Device:      ", device.strip())
        msg += self.nice_print('#', '')
        msg += self.nice_print('# Desc:        ',
                               self.get_desc(device.strip()))
        msg += self.nice_print('# OS:          ', self.get_os(device.strip()))
        msg += self.nice_print('# IP:          ', self.get_ip(device.strip()))
        msg += self.nice_print('# Forwarding:  ',
                               str(self.get_fwd(device.strip())))
        msg += self.nice_print('# Asymmetric:  ',
                               str(self.get_asym(device.strip())))
        msg += self.nice_print('#', '')
        msg += "#" * 80
        return msg

    def print_header(self):
        """
        Return the nice header placed on top of every generated output.
        """
        msg = "#" * 80
        msg += '\n'
        msg += self.nice_print('#', '')
        msg += self.nice_print('#', '')
        msg += self.nice_print('# adm6:      ', 'Packetfilter generator for')
        msg += self.nice_print('#            ',
                               'Linux ip6tables and OpenBSD pf.conf')
        msg += self.nice_print('#', '')
        msg += self.nice_print('# License:   ',
                               'GPLv3 - General Public License version 3')
        msg += self.nice_print('#          ',
                               '                    or any later version')
        msg += self.nice_print('#', '')
        msg += self.nice_print('#', '')
        myversion = self.cfp.get('global', 'version')
        msg += self.nice_print('# Version:   ', myversion)
        config_timestamp = self.cfp.get('global', 'timestamp')
        msg += self.nice_print('# Date:      ', config_timestamp)
        msg += self.nice_print('# Author:    ', 'Johannes Hubertz')
        msg += self.nice_print('#', '')
        msg += self.nice_print('# Configuration of almost everything: ',
                               self.filename.strip())
        msg += self.nice_print('#', '')
        msg += self.nice_print(
            '# Copyright: ', '(c)2011-2013 Johannes Hubertz, ' +
            'Cologne, Germany, Europe, Earth')
        msg += self.nice_print('#', '')
        msg += self.nice_print('#', '')
        msg += "#" * 80
        return msg

    def print_all_headers(self):
        """
        Return all device headers (for debug purposes only).
        """
        headers = self.print_header() + '\n'
        mydevs = self.get_devices().split(',')
        for device in mydevs:
            if self.get_apply(device):
                headers += self.print_head(device)
                headers += '\n'
        return headers

    def nice_print(self, title, mytext):
        """Format one 80-column config line ending in '#', only to
        impress the user."""
        rest_len = 78 - len(title) - len(mytext)
        msg = title + " " + mytext + " " * rest_len + "#"
        return msg + '\n'
コード例 #51
0
ファイル: bench.py プロジェクト: unibg-seclab/shuffleindex
                            config.get('s3', 'secret_key'),
                            config.get('s3', 'bucket_name')))
            for _ in xrange(S)
        ]

    elif args.TEST == 'ecs_s3':
        config = ConfigParser()
        config.read(args.config)
        statslayers = [
            remote_datalayer(
                ECSS3DataLayer(config.get('ecs_s3', 'access_key'),
                               config.get('ecs_s3', 'secret_key'),
                               config.get('ecs_s3', 'bucket_name'),
                               config.get('ecs_s3', 'host'),
                               config.getint('ecs_s3', 'port'),
                               config.getboolean('ecs_s3', 'is_secure')))
            for _ in xrange(S)
        ]

    elif args.TEST == 'ecs_swift':
        config = ConfigParser()
        config.read(args.config)
        statslayers = [
            remote_datalayer(
                ECSSwiftDataLayer(config.get('ecs_swift', 'authurl'),
                                  config.get('ecs_swift', 'username'),
                                  config.get('ecs_swift', 'key'),
                                  config.get('ecs_swift', 'namespace'),
                                  config.get('ecs_swift', 'bucket_name'),
                                  config.get('ecs_swift', 'auth_version')))
            for _ in xrange(S)
コード例 #52
0
ファイル: get_clean.py プロジェクト: folguinch/GoContinuum
def main():
    """Compute (dirty) images for each input measurement set.

    Reads tclean parameters from a section of the given configuration
    file and runs tclean/exportfits either once with all spectral
    windows combined, or once per spectral window.
    """
    # Command line options
    parser = argparse.ArgumentParser()
    parser.add_argument('-c', nargs=1, help='Casa parameter.')
    parser.add_argument('--all_spws',
                        action='store_true',
                        help='Combine all spectral windows')
    parser.add_argument('--section',
                        nargs=1,
                        type=str,
                        default=['dirty'],
                        help='Configuration section name (default = dirty)')
    parser.add_argument('configfile',
                        nargs=1,
                        type=str,
                        help='Configuration file name')
    parser.add_argument('outputdir',
                        nargs=1,
                        type=str,
                        help='Output directory')
    parser.add_argument('uvdata',
                        nargs='*',
                        type=str,
                        help='UV data to estract dirty images')
    args = parser.parse_args()

    # Configuration file defaults. Keys must match tclean parameter
    # names exactly: 'chanchunks' was previously misspelled
    # 'chancunks', so its default never applied.
    config = ConfigParser({
        'robust': '0.5',
        'deconvolver': 'hogbom',
        'specmode': 'cube',
        'outframe': 'LSRK',
        'gridder': 'standard',
        'interactive': 'False',
        'weighting': 'briggs',
        'niter': '0',
        'chanchunks': '-1'
    })
    config.read(args.configfile[0])
    section = args.section[0]

    # Build the tclean keyword arguments, casting each configured option
    # to the type tclean expects; add keys to the lists as needed.
    float_keys = ['robust', 'pblimit', 'pbmask']
    int_keys = ['niter', 'chanchunks']
    bool_keys = ['interactive', 'parallel', 'pbcor']
    ignore_keys = ['vis', 'imagename', 'spw']
    tclean_pars = {}
    for key in tclean.parameters.keys():
        if key not in config.options(section) or key in ignore_keys:
            continue
        # Check for type:
        if key in float_keys:
            tclean_pars[key] = config.getfloat(section, key)
        elif key in int_keys:
            tclean_pars[key] = config.getint(section, key)
        elif key in bool_keys:
            tclean_pars[key] = config.getboolean(section, key)
        elif key == 'imsize':
            tclean_pars[key] = map(int, config.get(section, key).split())
        else:
            tclean_pars[key] = str(config.get(section, key))
    casalog.post('tclean non-default parameters: %r' % tclean_pars)

    for ms in args.uvdata:
        # Number of spws
        nspws = len(vishead(vis=ms, mode='list')['spw_name'][0])
        casalog.post('Processing ms: %s' % ms)
        casalog.post('Number of spws in ms %s: %i' % (ms, nspws))

        # Extract properties from ms file name
        msname = os.path.basename(ms.strip('/'))
        msname, ext = os.path.splitext(msname)

        # Cases:
        if args.all_spws or 'spw' in config.options(section):
            spw = ','.join(map(str, range(nspws)))
            if 'spw' in config.options(section) and \
                    config.get(section, 'spw') != spw:
                # Use the spw selection from the config file; the
                # original code wrongly assigned config.options(section)
                # (the list of option names) here.
                spw = config.get(section, 'spw')
                imagename = '{0}/{1}.spw{2}.robust{3}'.format(
                    args.outputdir[0], msname, spw, tclean_pars['robust'])
            else:
                imagename = '{0}/{1}.robust{2}'.format(args.outputdir[0],
                                                       msname,
                                                       tclean_pars['robust'])
            # spw is a comma-separated string here, hence %s (not %i).
            casalog.post('Processing spw: %s' % spw)
            tclean(vis=ms, spw=spw, imagename=imagename, **tclean_pars)

            imagename = imagename + '.image'
            exportfits(imagename=imagename,
                       fitsimage=imagename + '.fits',
                       overwrite=True)
        else:
            # One image per spectral window.
            for spw in range(nspws):
                casalog.post('Processing spw: %i' % spw)
                imagename = '{0}/{1}.spw{2}.robust{3}'.format(
                    args.outputdir[0], msname, spw, tclean_pars['robust'])
                casalog.post(imagename)
                tclean(vis=ms,
                       spw='%i' % spw,
                       imagename=imagename,
                       **tclean_pars)

                imagename = imagename + '.image'
                exportfits(imagename=imagename,
                           fitsimage=imagename + '.fits',
                           overwrite=True)
コード例 #53
0
    def parser(self):
        """Load settings from the config file into self.__config.

        Options absent from the file keep their current values; missing
        or zero ports fall back to defaults (http 8080, tcp 9999,
        vod 9001). Returns the (possibly updated) config object.
        """
        # (section, option, sub-config attribute, target field, getter)
        layout = (
            ('video', 'width', 'video', 'width', 'getint'),
            ('video', 'height', 'video', 'height', 'getint'),
            ('video', 'fps', 'video', 'fps', 'getint'),
            ('video', 'bitrate', 'video', 'bitrate', 'getint'),
            ('video', 'brightness', 'video', 'brightness', 'getint'),
            ('video', 'rtsp_port', 'video', 'rtsp_port', 'getint'),
            ('record', 'base', 'record', 'base', 'get'),
            ('record', 'cycle', 'record', 'cycle', 'getboolean'),
            ('record', 'limit', 'record', 'fsp_limit', 'getint'),
            ('common', 'tcp_port', 'comm_port', 'tcp_port', 'getint'),
            ('common', 'http_port', 'comm_port', 'http_port', 'getint'),
            ('common', 'vod_port', 'comm_port', 'vod_port', 'getint'),
        )
        try:
            if not (exists(self.__path) and isfile(self.__path)):
                raise AppException('config file error')
            with open(self.__path) as fhandler:
                try:
                    config_parser = ConfigParser()
                    config_parser.readfp(fhandler)
                    for section, option, group, field, getter in layout:
                        if not config_parser.has_option(section, option):
                            continue
                        value = getattr(config_parser, getter)(section,
                                                               option)
                        setattr(getattr(self.__config, group), field, value)

                    # assign default values if value is invalid
                    ports = self.__config.comm_port
                    if not ports.http_port:
                        ports.http_port = 8080
                    if not ports.tcp_port:
                        ports.tcp_port = 9999
                    if not ports.vod_port:
                        ports.vod_port = 9001
                    ports.address = get_local_ip()

                except (AppException, ConfigError) as ex:
                    APPLOGGER.error(ex)
        except OSError as ex:
            APPLOGGER.error(ex)
        return self.__config
コード例 #54
0
    SECTION, 'air_density_update_secs')

# For tyre-driven trainers, the wheel circumference in meters (2.122 for Continental Home trainer tyre)
POWER_CALCULATOR.wheel_circumference = CONFIG.getfloat(SECTION,
                                                       'wheel_circumference')

# Overall correction factor, e.g. to match a user's power meter on another bike
POWER_CALCULATOR.set_correction_factor(
    CONFIG.getfloat(SECTION, 'correction_factor'))

# ANT+ ID of the virtual power sensor
# The expression below will choose a fixed ID based on the CPU's serial number
# (masking the MD5 digest with 0xfffe and adding 1 yields an odd ID
# in the range 1..65535, so the ID is never zero).
POWER_SENSOR_ID = int(int(hashlib.md5(getserial()).hexdigest(), 16)
                      & 0xfffe) + 1

# If set to True, the stick's driver will dump everything it reads/writes from/to the stick.
# Debug output is enabled either via the config file or the VPOWER_DEBUG flag.
DEBUG = CONFIG.getboolean(SECTION, 'debug')

POWER_CALCULATOR.set_debug(DEBUG or VPOWER_DEBUG)

# Set to None to disable ANT+ message logging
LOG = None
# LOG = log.LogWriter(filename="vpower.log")

# ANT+ network key
NETKEY = '\xB9\xA5\x21\xFB\xBD\x72\xC3\x45'

if LOG:
    print "Using log file:", LOG.filename
    print ""
コード例 #55
0
class WisMon(object):
    def __init__(self, working_dir):
        """Set up configuration, logging and database for the monitor.

        :param working_dir: directory containing the config/, data/ and
            logs/ sub-directories.
        :raises WmError: if config/wismon.cfg does not exist.
        """
        self.working_dir = working_dir
        self.config_file = os.path.join(self.working_dir, 'config',
                                        'wismon.cfg')
        if not (os.path.exists(self.config_file)
                and os.path.isfile(self.config_file)):
            raise WmError('Config file not exists: %s' % self.config_file)
        self.config = ConfigParser()
        self.config.optionxform = str  # preserve case of option names
        self.config.read(self.config_file)

        self.gisc_name = self.config.get('monitor', 'centre')
        self.time_now = datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ')

        self.data_dir = os.path.join(self.working_dir, 'data')
        self.log_dir = os.path.join(self.working_dir, 'logs')
        self.json_dir = os.path.join(self.data_dir, 'JSON')

        self.n_messages_retain = self.config.getint('system',
                                                    'n_messages_retain')

        # Set up the rotating logging file handler.
        log_handler = logging.handlers.RotatingFileHandler(os.path.join(
            self.log_dir, 'wismon.log'),
                                                           maxBytes=1048576,
                                                           backupCount=5)

        log_handler.setFormatter(
            logging.Formatter(
                '%(asctime)s %(levelname)s [%(funcName)s] - %(message)s'))
        LOGGER.addHandler(log_handler)

        level = self.config.get('system', 'logging_level')
        try:
            # An unknown level name raises KeyError here; the original
            # caught NameError, which the dict lookup can never raise,
            # so an invalid configured level crashed the constructor.
            level = logging._levelNames[level.upper()]
            LOGGER.setLevel(level)
        except KeyError:
            LOGGER.warning('invalid logging level: %s' % level)

        # Initialize the database after logging is configured so logging
        # messages are properly directed.
        self.wismon_db = WisMonDB(os.path.join(self.data_dir,
                                               'wismon.sqlite3'))

    def config_get_with_default(self, section_name, option_name, default=None):
        """Return the value of (section, option) from the config, or
        *default* when either the section or the option is absent."""
        cfg = self.config
        known = (cfg.has_section(section_name)
                 and cfg.has_option(section_name, option_name))
        if not known:
            return default
        return cfg.get(section_name, option_name)

    def monitor_cache_json_gen(self, force_regen=False):
        """
        Generate json messages for Monitor and Cache JSON.

        Queries the OpenWIS database, archives the previous day's
        metadata snapshot, saves a fresh one, and derives overall and
        per-centre cache statistics.

        :param force_regen: when True, delete and re-create today's
            messages instead of raising WmError.
        :return: tuple (monitor_json, cache_json)
        :raises WmError: if messages already exist for today and
            force_regen is False.
        """

        # Today at midnight (UTC) is the key under which messages are stored.
        date_now = '{0}T00:00:00Z'.format(self.time_now[:10])

        if self.wismon_db.json_exists('monitor', date_now):
            if force_regen:
                LOGGER.info(
                    'Re-generate JSON files for date: {0}'.format(date_now))
                self.wismon_db.restore_metadata(date_now)
                # NOTE(review): centres_json_gen calls
                # self.wismon_db.json_del(...); confirm self.json_del
                # exists or whether wismon_db.json_del was intended here.
                self.json_del(MONITOR_JSON_NAME, date_now)
                self.json_del(CACHE_JSON_NAME, date_now)
            else:
                raise WmError(
                    'JSON messages already exist for date: {0}'.format(
                        date_now))

        else:
            LOGGER.info(
                'Creating JSON messages for date: {0}'.format(date_now))

        # Create JSON file objects
        monitor_json = MonitorJSON(self.gisc_name, date_now)
        cache_json = CacheJSON(self.gisc_name, date_now)

        # Read the category name for WIMMS set
        wimms_name = self.config_get_with_default('monitor', 'WIMMS_name', '')

        LOGGER.info('Sending query to OpenWIS DB ...')
        rows = query_openwis(host=self.config.get('system', 'openwis_db_host'),
                             port=self.config.getint('system',
                                                     'openwis_db_port'),
                             database=self.config.get('system',
                                                      'openwis_db_name'),
                             user=self.config.get('system', 'openwis_db_user'),
                             password=self.config.get('system',
                                                      'openwis_db_pass'))

        # Save data from the previous day
        # TODO: Somehow the alter and create table statements have to be executed as a single
        #       script. Otherwise, the table will not be created after the alter statement.
        LOGGER.info(
            "Saving snapshot of the metadata catalogue from previous day ...")
        self.wismon_db.archive_metadata()
        LOGGER.info('Saving new snapshot of the metadata catalogue ...')
        self.wismon_db.save_metadata(rows)

        # Overall stats, split by draft vs. non-draft metadata status.
        LOGGER.info('Querying for overall metadata stats ...')
        stats = self.wismon_db.group_by_metadata_status(
            "category LIKE 'WIS-GISC-%' OR category IN ('{0}', '{1}')".format(
                'draft', wimms_name))
        monitor_json.set('metrics.metadata_catalogue.number_of_metadata',
                         stats[NON_DRAFT].n_metadata)
        monitor_json.set('metrics.cache_24h.number_of_product_instances',
                         stats[NON_DRAFT].n_mapped_files)
        monitor_json.set(
            'metrics.cache_24h.number_of_product_instances_missing_metadata',
            stats[DRAFT].n_mapped_files)
        monitor_json.set('metrics.cache_24h.size_of_cache',
                         stats[NON_DRAFT].size)
        monitor_json.set(
            'metrics.cache_24h.size_of_product_instances_missing_metadata',
            stats[DRAFT].size)
        monitor_json.set(
            'metrics.cache_24h.number_of_unique_products_missing_metadata',
            stats[DRAFT].n_metadata)

        # Get the urn patterns (one space-separated list per centre in
        # the optional [cache] section).
        urn_patterns = {}
        try:
            for centre_name in self.config.options('cache'):
                urn_patterns[centre_name] = self.config.get('cache',
                                                            centre_name,
                                                            raw=True).split()
        except NoSectionError:
            pass

        # Per-centre stats: metadata whose uuid matches any of the
        # centre's urn patterns.
        LOGGER.info('Querying for AMDCN metadata stats ...')
        number_of_unique_products_missing_metadata_AoR = 0
        for centre_name, patterns in urn_patterns.items():
            centre_idx = cache_json.new_member()
            cache_json.set('centres[{0}].centre'.format(centre_idx),
                           centre_name)

            where_expr = "(category LIKE 'WIS-GISC-%' OR category IN ('{0}', '{1}')) AND ({2})".format(
                'draft', wimms_name,
                ' OR '.join("uuid REGEXP '{0}'".format(p) for p in patterns))
            stats = self.wismon_db.group_by_metadata_status(where_expr)
            cache_json.set(
                'centres[{0}].metrics.number_of_product_instances'.format(
                    centre_idx), stats[NON_DRAFT].n_mapped_files)
            cache_json.set(
                'centres[{0}].metrics.size_of_product_instances'.format(
                    centre_idx), stats[NON_DRAFT].size)
            cache_json.set(
                'centres[{0}].metrics.number_of_product_instances_missing_metadata'
                .format(centre_idx), stats[DRAFT].n_mapped_files)
            cache_json.set(
                'centres[{0}].metrics.size_of_product_instances_missing_metadata'
                .format(centre_idx), stats[DRAFT].size)
            cache_json.set(
                'centres[{0}].metrics.number_of_unique_products_missing_metadata'
                .format(centre_idx), stats[DRAFT].n_metadata)
            number_of_unique_products_missing_metadata_AoR += stats[
                DRAFT].n_metadata
            cache_json.set(
                'centres[{0}].metrics.number_of_metadata'.format(centre_idx),
                stats[NON_DRAFT].n_mapped_files)

        monitor_json.set(
            'metrics.cache_24h.number_of_unique_products_missing_metadata_AoR',
            number_of_unique_products_missing_metadata_AoR)

        # Service availability checks (ping each configured URL).
        LOGGER.info('Checking self service status ...')
        portal_url = self.config.get('monitor', 'portal_url')
        monitor_json.set('metrics.services.portal.status',
                         ping_url(portal_url) >= 0)

        # Check whether OAI-PMH server is up
        oaipmh_url = self.config.get('monitor', 'oaipmh_url')
        monitor_json.set('metrics.services.oaipmh.status',
                         ping_oaipmh(oaipmh_url) >= 0)

        sru_url = self.config.get('monitor', 'sru_url')
        monitor_json.set('metrics.services.sru.status', ping_url(sru_url) >= 0)

        distribution_url = self.config.get('monitor', 'distribution_url')
        monitor_json.set('metrics.services.distribution_system.status',
                         ping_url(distribution_url) >= 0)

        # Static GISC properties copied from the [monitor] section.
        monitor_json.set('gisc_properties.portal_url', portal_url)
        monitor_json.set('gisc_properties.oaipmh_url', oaipmh_url)
        monitor_json.set('gisc_properties.sru_url', sru_url)
        monitor_json.set('gisc_properties.monitor_url',
                         self.config.get('monitor', 'monitor_url') or None)
        monitor_json.set('gisc_properties.cache_url',
                         self.config.get('monitor', 'cache_url'))
        monitor_json.set('gisc_properties.centres_url',
                         self.config.get('monitor', 'centres_url'))
        monitor_json.set('gisc_properties.events_url',
                         self.config.get('monitor', 'events_url'))
        monitor_json.set('gisc_properties.backup_giscs', [
            x.strip()
            for x in self.config.get('monitor', 'backup_giscs').split(',')
        ])
        monitor_json.set('gisc_properties.rmdcn.main',
                         self.config.get('monitor', 'rmdcn.main'))
        monitor_json.set('gisc_properties.rmdcn.sub',
                         self.config.get('monitor', 'rmdcn.sub'))
        monitor_json.set('gisc_properties.rmdcn.DR_main',
                         self.config.get('monitor', 'rmdcn.DR_main'))
        monitor_json.set('gisc_properties.contact_info.voice',
                         self.config.get('monitor', 'contact_info.voice'))
        monitor_json.set('gisc_properties.contact_info.email',
                         self.config.get('monitor', 'contact_info.email'))

        LOGGER.info('Querying stats for new and modified metadata ...')
        monitor_json.set(
            'metrics.metadata_catalogue.number_of_changes_insert_modify',
            self.wismon_db.stats_inserted_modified(wimms_name))
        LOGGER.info('Querying stats for deleted metadata ...')
        monitor_json.set('metrics.metadata_catalogue.number_of_changes_delete',
                         self.wismon_db.stats_deleted(wimms_name))

        monitor_json.set('remarks', self.wismon_db.remarks_get())

        # Metadata breakdown stats (optional [analysis] section).
        try:
            if self.config.getboolean('analysis', 'metadata_source_breakdown'):
                LOGGER.info('Calculating metadata source breakdown stats')
                self.wismon_db.calc_metadata_breakdown(date_now)
        except (NoSectionError, NoOptionError):
            pass

        LOGGER.info('Saving JSON messages to files')
        monitor_json.to_file(
            os.path.join(self.json_dir, '{0}.json'.format(MONITOR_JSON_NAME)))
        cache_json.to_file(
            os.path.join(self.json_dir, '{0}.json'.format(CACHE_JSON_NAME)))

        LOGGER.info('Saving JSON messages to local database')
        self.wismon_db.json_save(MONITOR_JSON_NAME, date_now, monitor_json)
        self.wismon_db.json_save(CACHE_JSON_NAME, date_now, cache_json)

        # Negative n_messages_retain disables throttling of stored messages.
        if self.n_messages_retain >= 0:
            self.wismon_db.json_throttle(MONITOR_JSON_NAME,
                                         self.n_messages_retain)
            self.wismon_db.json_throttle(CACHE_JSON_NAME,
                                         self.n_messages_retain)

        return monitor_json, cache_json

    def centres_json_gen(self, force_regen=False):
        """Build, persist and return the Centres JSON message.

        Pings the portal/oaipmh/sru URLs of every configured WIS centre in
        parallel (bounded by the ``n_threads`` config option) and records
        the response times into the message.

        :param force_regen: delete and re-create the message if one already
            exists for the current (minute-truncated) datetime.
        :raises WmError: if the message already exists and *force_regen*
            is false.
        """
        import threading

        # Truncate to the minute so repeated runs within the same minute
        # map onto the same message datetime.
        time0_now = '{0}00Z'.format(self.time_now[:17])

        if self.wismon_db.json_exists(CENTRES_JSON_NAME, time0_now):
            if force_regen:
                LOGGER.info(
                    'Re-generate Centres JSON for datetime: {0}'.format(
                        time0_now))
                self.wismon_db.json_del(CENTRES_JSON_NAME, time0_now)
            else:
                raise WmError(
                    'Centres JSON already exists for datetime: {0}'.format(
                        time0_now))
        else:
            LOGGER.info(
                'Creating Centres JSON for datetime: {0}'.format(time0_now))

        centres_json = CentresJSON(self.gisc_name, time0_now)

        n_threads = self.config.getint('system', 'n_threads')
        LOGGER.info(
            'About to run {0} threads to ping service URLs of WIS Centres ...'.
            format(n_threads))
        centres_sections = [
            name for name in self.config.sections()
            if name.startswith('centres-')
        ]

        def f(s, url, path_to_json_element):
            # Worker: ping one URL (None/blank URLs are recorded as None)
            # and store the result directly into the shared JSON object.
            if url is None or url.strip() == '':
                res = None
            else:
                LOGGER.info('Ping {0}'.format(url))
                with s:
                    res = ping_url(url, timeout=20, n_retries=3)
            centres_json.set(path_to_json_element, res)

        # Semaphore caps the number of concurrent pings at n_threads.
        semaphore = threading.Semaphore(n_threads)
        all_threads = []
        for section_name in centres_sections:
            idx_centre = centres_json.new_member()
            centres_json.set('centres[{0}].centre'.format(idx_centre),
                             self.config.get(section_name, 'name'))
            for option_name_stub in ('portal', 'oaipmh', 'sru'):
                t = threading.Thread(
                    target=f,
                    args=(semaphore,
                          self.config_get_with_default(
                              section_name,
                              '{0}_url'.format(option_name_stub)),
                          'centres[{0}].metrics.{1}_response_time'.format(
                              idx_centre, option_name_stub)))
                t.start()
                all_threads.append(t)

        # Fixed: the enumerate() index was unused; just join every worker.
        for t in all_threads:
            t.join()

        centres_json.to_file(
            os.path.join(self.json_dir, '{0}.json'.format(CENTRES_JSON_NAME)))

        try:
            self.wismon_db.json_save(CENTRES_JSON_NAME, time0_now,
                                     centres_json)
        except OperationalError as e:
            # Fixed: Logger.warn is a deprecated alias of warning.
            LOGGER.warning(
                'Database error: {}. Retry in 60 seconds'.format(e))
            time.sleep(60)
            self.wismon_db.json_save(CENTRES_JSON_NAME, time0_now,
                                     centres_json)

        if self.n_messages_retain >= 0:
            self.wismon_db.json_throttle(CENTRES_JSON_NAME,
                                         self.n_messages_retain)

        return centres_json

    def events_json_gen(self, force_regen=False):
        """Build, persist and return the Events JSON message.

        :param force_regen: delete and re-create the message if one already
            exists for the current datetime.
        :raises WmError: if the message already exists and *force_regen*
            is false.
        """
        when = self.time_now
        if self.wismon_db.json_exists(EVENTS_JSON_NAME, when):
            if not force_regen:
                raise WmError(
                    'Events JSON already exists for datetime: {0}'.format(
                        when))
            LOGGER.info('Re-generate Events JSON for datetime: {0}'.format(
                when))
            self.wismon_db.json_del(EVENTS_JSON_NAME, when)
        else:
            LOGGER.info('Creating Events JSON for datetime: {0}'.format(when))

        payload = EventsJSON(self.gisc_name, when)
        # Copy every active event row into the message, one member each.
        LOGGER.info('Gathering events ...')
        for record in self.wismon_db.events_get(when):
            _, title, text, start_dt, end_dt = record
            pos = payload.new_member()
            prefix = 'events[{0}]'.format(pos)
            payload.set(prefix + '.id', pos + 1)
            payload.set(prefix + '.title', title)
            payload.set(prefix + '.text', text)
            payload.set(prefix + '.start', start_dt)
            payload.set(prefix + '.end', end_dt)

        payload.to_file(
            os.path.join(self.json_dir, '{0}.json'.format(EVENTS_JSON_NAME)))
        self.wismon_db.json_save(EVENTS_JSON_NAME, when, payload)

        if self.n_messages_retain >= 0:
            self.wismon_db.json_throttle(EVENTS_JSON_NAME,
                                         self.n_messages_retain)

        return payload

    def json_get(self, name, datetime_string):
        """Return the parsed JSON message *name* stored for the datetime.

        :raises WmError: if no matching message exists.
        """
        dt = get_uniform_datetime_string(datetime_string)
        record = self.wismon_db.json_get(name, dt)
        if not record:
            raise WmError('No {0} JSON message for datetime: {1}'.format(
                name, dt or 'Most Recent'))
        return json.loads(record[3])

    def json_del(self, name, datetime_string):
        """Delete the JSON message *name* stored for the datetime.

        :raises WmError: if nothing was deleted.
        """
        dt = get_uniform_datetime_string(datetime_string)
        if self.wismon_db.json_del(name, dt) == 0:
            raise WmError('No {0} JSON messages for datetime: {1}'.format(
                name, dt or 'Most Recent'))

    def event_add(self,
                  start_datetime_string,
                  end_datetime_string,
                  title,
                  text=''):
        """Store a new event row after normalising both datetimes."""
        start_dt = get_uniform_datetime_string(start_datetime_string)
        end_dt = get_uniform_datetime_string(end_datetime_string)
        LOGGER.info('Adding event: {0}'.format(title))
        self.wismon_db.event_add(start_dt, end_dt, title, text)

    def event_get(self, datetime_string):
        """Return the events active at the given datetime as a list of
        dicts.

        :raises WmError: if no event matches.
        """
        dt = get_uniform_datetime_string(datetime_string)
        rows = self.wismon_db.events_get(dt)
        if not rows:
            raise WmError('No event for datetime: {0}'.format(dt))
        keys = ('id', 'title', 'text', 'start_datetime_string',
                'end_datetime_string')
        events = []
        for eid, title, text, sd, ed in rows:
            # text may be NULL in the database; normalise it to ''.
            events.append(dict(zip(keys, (eid, title, text or '', sd, ed))))
        return events

    def event_del(self, eid):
        """Delete the event with id *eid*.

        :raises WmError: if no such event exists.
        """
        deleted = self.wismon_db.event_del(eid)
        if deleted == 0:
            raise WmError('No event of id: {0}'.format(eid))

    def remarks_set(self, text):
        """Store *text* as the remarks carried in the monitor message."""
        db = self.wismon_db
        db.remarks_set(text)

    def remarks_get(self):
        """Return the stored remarks text.

        :raises WmError: if no remarks are stored.
        """
        record = self.wismon_db.remarks_get()
        if record is None:
            raise WmError('No remarks is found')
        return record[0]

    @staticmethod
    def init_working_directory(working_directory):
        """Create the standard working-directory layout (config/, data/JSON,
        logs/) and seed config/wismon.cfg from the packaged template."""
        config_dir = os.path.join(working_directory, 'config')
        if not os.path.exists(config_dir):
            os.makedirs(config_dir)

        template_path = os.path.join(BASE_DIR, 'config_template.cfg')
        with open(template_path) as ins:
            template_text = ins.read()
        with open(os.path.join(config_dir, 'wismon.cfg'), 'w') as outs:
            outs.write(template_text)

        # data/JSON needs makedirs (creates 'data' too); logs is a single
        # level, so plain mkdir matches the original behaviour.
        for parts, maker in ((('data', 'JSON'), os.makedirs),
                             (('logs',), os.mkdir)):
            path = os.path.join(working_directory, *parts)
            if not os.path.exists(path):
                maker(path)
コード例 #56
0
ファイル: WisMon.py プロジェクト: imousmoutis/openwis-clone
class WisMon(object):
    """Generates and manages the WIS monitoring JSON messages (monitor,
    centres, events) for a GISC.

    State lives under ``working_dir``: configuration in config/wismon.cfg,
    a SQLite snapshot database in data/wismon.sqlite3, JSON output files in
    data/JSON and a rotating log file in logs/wismon.log.
    """

    def __init__(self, working_dir):
        """Load configuration, create data/log directories, attach logging.

        :param working_dir: directory that must already contain
            config/wismon.cfg.
        :raises WmError: if the config file is missing.
        """
        self.working_dir = working_dir
        self.config_file = os.path.join(self.working_dir, 'config',
                                        'wismon.cfg')
        if not (os.path.exists(self.config_file)
                and os.path.isfile(self.config_file)):
            raise WmError('Config file not exists: %s' % self.config_file)
        self.config = ConfigParser()
        self.config.optionxform = str  # preserve case
        self.config.read(os.path.join(self.working_dir, 'config',
                                      'wismon.cfg'))

        self.data_dir = os.path.join(self.working_dir, 'data')
        if not os.path.exists(self.data_dir):
            os.mkdir(self.data_dir)
        self.log_dir = os.path.join(self.working_dir, 'logs')
        if not os.path.exists(self.log_dir):
            os.mkdir(self.log_dir)
        self.json_dir = os.path.join(self.data_dir, 'JSON')
        if not os.path.exists(self.json_dir):
            os.mkdir(self.json_dir)

        self.wismon_db_file = os.path.join(self.data_dir, 'wismon.sqlite3')

        # Set up the logging file
        log_handler = logging.handlers.RotatingFileHandler(os.path.join(
            self.log_dir, 'wismon.log'),
                                                           maxBytes=1048576,
                                                           backupCount=5)

        log_handler.setFormatter(
            logging.Formatter(
                '%(asctime)s %(levelname)s [%(funcName)s] - %(message)s'))
        logger.addHandler(log_handler)

        level = self.config.get('system', 'logging_level')
        try:
            # NOTE: logging._levelNames is a private Py2-only mapping; an
            # unknown name raises KeyError (AttributeError on Py3).
            # Fixed: the original 'except NameError' could never fire.
            level = logging._levelNames[level.upper()]
            logger.setLevel(level)
        except (KeyError, AttributeError):
            logger.warning('invalid logging level: %s' % level)

    def json_gen(self, force_regen=False):
        """Create monitor/centres/events JSON messages for the current day.

        Snapshots the OpenWIS catalogue into the local SQLite database,
        computes cache/metadata statistics and service statuses, writes
        the three JSON files and stores them in the local database.

        :param force_regen: discard and re-create today's messages if they
            already exist.
        :returns: tuple (monitor_json, centres_json, events_json)
        :raises WmError: if messages for the day exist and *force_regen*
            is false.
        """
        gisc_name = self.config.get('monitor', 'centre')
        now = datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ')
        date_now = now[:10]

        # Optional [centre] section maps centre name -> uuid regex patterns.
        centre_patterns = {}
        try:
            for item in self.config.options('centre'):
                centre_patterns[item] = self.config.get('centre',
                                                        item,
                                                        raw=True).split()
        except NoSectionError:
            pass

        with connect_wismondb(self.wismon_db_file) as (conn, cursor):

            # Do nothing if JSON messages of the day already exist in database
            qrs = cursor.execute(sql_json_get, (date_now, ))
            if qrs.fetchall():
                if force_regen:
                    logger.info('Re-generate JSON files for date %s' %
                                date_now)
                    cursor.execute(sql_json_del, (date_now, ))
                    cursor.execute(
                        'DELETE FROM wismon_md_breakdown WHERE date = ?',
                        (date_now, ))
                    # Roll the snapshot back so the previous day's data is
                    # restored before regenerating.
                    cursor.executescript(
                        'DROP TABLE IF EXISTS wismon_metadata;\n' +
                        'ALTER TABLE old_wismon_metadata RENAME TO wismon_metadata;\n'
                    )
                else:
                    raise WmError('JSON messages for day %s already exist' %
                                  date_now)
            else:
                logger.info('Creating JSON messages for date %s' % date_now)

            with connect_openwisdb(
                    host=self.config.get('system', 'openwis_db_host'),
                    port=self.config.getint('system', 'openwis_db_port'),
                    database=self.config.get('system', 'openwis_db_name'),
                    user=self.config.get('system', 'openwis_db_user'),
                    password=self.config.get(
                        'system', 'openwis_db_pass')) as (ow_conn, ow_cursor):
                logger.info('Sending query to openwis db ...')
                ow_cursor.execute(sql_query_to_openwis)
                rows = ow_cursor.fetchall()
                logger.info('Done')

            # Save data from the previous day
            # TODO: Somehow the alter and create table statements have to be executed as a single
            #       script. Otherwise, the table will not be created after the alter statement.
            logger.info("Saving data from previous day")
            cursor.executescript("""
            DROP TABLE IF EXISTS old_wismon_metadata;
            ALTER TABLE wismon_metadata RENAME TO old_wismon_metadata;
            %s
            """ % sql_schema_wismon_metadata)

            logger.info('Saving query results ...')
            cursor.executemany(sql_save_snapshot, rows)
            conn.commit()
            logger.info('Done')

            logger.info('Querying for global metadata stats')
            qrs_concrete = qrs_draft = None
            # Calculate metadata stats by grouping as draft and non-draft
            for row in cursor.execute(sql_global_stats).fetchall():
                if row[3] == 0:  # this is the non-draft (0 is false) row
                    qrs_concrete = row
                else:
                    qrs_draft = row
            # In theory, both draft and non-draft stats could be null if no such metadata available in catalogue
            # If draft exists
            if qrs_draft:
                n_uniq_products_draft, n_products_draft, size_cache_draft, _ = qrs_draft
            else:  # otherwise set draft related stats to zero
                n_uniq_products_draft = n_products_draft = size_cache_draft = 0
            # If non-draft exists
            if qrs_concrete:
                # Number of total metadata are draft plus non-draft
                n_uniq_products = n_uniq_products_draft + qrs_concrete[0]
                n_products = n_products_draft + qrs_concrete[1]
                size_cache = size_cache_draft + qrs_concrete[2]
            else:  # if no non-draft available, stats of total is the same as stats of draft
                # Fixed: the original chained assignment bound the whole
                # 3-tuple to each of the three names; unpack instead.
                n_uniq_products, n_products, size_cache = (
                    n_uniq_products_draft, n_products_draft,
                    size_cache_draft)

            # centres.json
            logger.info('Querying for AMDCN metadata stats')
            centres_json = CentresJSON(gisc_name, now)
            amdcn_n_uniq_products_draft = 0
            for ct_name, patterns in centre_patterns.items():
                where_expr = ' OR '.join("uuid REGEXP '%s'" % p
                                         for p in patterns)
                ct_qrs_concrete = ct_qrs_draft = None
                for row in cursor.execute(sql_amdcn_stats %
                                          where_expr).fetchall():
                    if row[3] == 0:
                        ct_qrs_concrete = row
                    else:
                        ct_qrs_draft = row
                if ct_qrs_draft:
                    ct_n_uniq_products_draft, ct_n_products_draft, ct_size_cache_draft, _ = ct_qrs_draft
                else:
                    ct_n_uniq_products_draft = ct_n_products_draft = ct_size_cache_draft = 0
                if ct_qrs_concrete:
                    ct_n_uniq_products = ct_n_uniq_products_draft + ct_qrs_concrete[
                        0]
                    ct_n_products = ct_n_products_draft + ct_qrs_concrete[1]
                    ct_size_cache = ct_size_cache_draft + ct_qrs_concrete[2]
                else:
                    ct_n_uniq_products = ct_n_products = ct_size_cache = 0
                amdcn_n_uniq_products_draft += ct_n_uniq_products_draft

                centres_json.add_centre(ct_name, ct_n_products, ct_size_cache)

            # monitor.json
            monitor_json = MonitorJSON(gisc_name, now)
            logger.info('Checking status of oai-pmh server')
            # Check whether oai server is up: any 2xx response counts as up.
            oai_url = self.config.get('monitor', 'oai_url')
            try:
                req = urllib2.urlopen('%s?verb=Identify' % oai_url)
                oai_status = 200 <= req.getcode() < 300
            except Exception:
                oai_status = False

            logger.info('Checking status of openwis portal')
            catalogue_url = self.config.get('monitor', 'catalogue_url')
            try:
                req = urllib2.urlopen(catalogue_url)
                catalogue_status = 200 <= req.getcode() < 300
            except Exception:
                catalogue_status = False

            logger.info('Checking status of dissemination server')
            dissemination_url = self.config.get('monitor', 'dissemination_url')
            try:
                req = urllib2.urlopen(dissemination_url)
                dissemination_status = 200 <= req.getcode() < 300
            except Exception:
                dissemination_status = False

            monitor_json.metric_services(
                oai_pmh={'status': oai_status},
                catalogue={'status': catalogue_status},
                distribution_system={'status': dissemination_status})

            monitor_json.gisc_properties(
                catalogue_url=catalogue_url,
                oai_url=oai_url,
                centres_inAoR_url=self.config.get('monitor',
                                                  'centres_inAoR_url'),
                events_url=self.config.get('monitor', 'events_url'),
                monitor_url=self.config.get('monitor', 'monitor_url'),
                backup_giscs=[
                    x.strip() for x in self.config.get(
                        'monitor', 'backup_giscs').split(',')
                ],
                contact_info={
                    'voice': self.config.get('monitor', 'contact_info_voice'),
                    'email': self.config.get('monitor', 'contact_info_email')
                })

            rmdcn_stats_url = self.config.get('monitor', 'rmdcn')
            if rmdcn_stats_url:
                monitor_json.metrics_rmdcn(rmdcn_stats_url)

            logger.info(
                'Counting number of records from GISC-specific sets (WIS-GISC-CITYNAME + WIMMS)'
            )
            # Try read the category name for WIMMS set
            try:
                wimms_name = self.config.get('monitor', 'WIMMS_name').strip()
            except NoOptionError:
                wimms_name = ''

            # An empty WIMMS name acts like a dummy for the query
            _sql = sql_md_total_GISC_specific % wimms_name

            qrs = cursor.execute(_sql)
            monitor_json.metrics_catalogue(number_of_records=qrs.fetchone()[0])

            # If stats from previous day exists, we can calculate the traffic.
            # These stats also only count GISC specific sets and WIMMS
            logger.info('Comparing for new and modified metadata')
            _sql = sql_md_insert_modify % wimms_name
            qrs = cursor.execute(_sql)
            n_insert_modify = qrs.fetchone()[0]
            logger.info('Comparing for deleted metadata')
            _sql = sql_md_deleted % wimms_name
            qrs = cursor.execute(_sql)
            n_delete = qrs.fetchone()[0]

            monitor_json.metrics_catalogue(
                number_of_changes_insert_modify=n_insert_modify,
                number_of_changes_delete=n_delete)

            monitor_json.metrics_cache(
                number_of_products_all=n_products,
                number_of_products_without_metadata=n_products_draft,
                bytes_of_cache_all=size_cache,
                bytes_of_cache_without_metadata=size_cache_draft,
                number_of_unique_products_without_metadata_AMDCN=
                amdcn_n_uniq_products_draft,
                number_of_unique_products_without_metadata_all=
                n_uniq_products_draft)

            qrs = cursor.execute(sql_remarks_get)
            remarks = qrs.fetchone()
            if remarks:
                monitor_json.remarks(remarks[0])

            # events.json
            logger.info('Gathering events')
            events_json = EventsJSON(gisc_name, now)
            qrs = cursor.execute(sql_event_get, (now, ))
            for _, title, text, startdatetime, enddatetime in qrs.fetchall():
                events_json.add_event(startdatetime, enddatetime, title, text)

            logger.info('Saving JSON messages to files')
            with open(os.path.join(self.json_dir, 'monitor.json'),
                      'w') as outs:
                outs.write(monitor_json.serialize(indent=4))
            with open(os.path.join(self.json_dir, 'centres.json'),
                      'w') as outs:
                outs.write(centres_json.serialize(indent=4))
            with open(os.path.join(self.json_dir, 'events.json'), 'w') as outs:
                outs.write(events_json.serialize(indent=4))

            logger.info('Saving JSON messages to local database')
            cursor.execute(sql_save_json,
                           (date_now, monitor_json.serialize(),
                            centres_json.serialize(), events_json.serialize()))

            # Metadata breakdown stats
            self.metadata_source_breakdown(cursor, date_now)

            conn.commit()
            return monitor_json, centres_json, events_json

    def json_get(self, date=None, name='monitor'):
        """Return the stored JSON message *name* for *date* (default today).

        :raises WmError: if no message exists for the date or *name* is
            unknown.
        """
        if date is None:
            date = datetime.utcnow().strftime('%Y-%m-%d')
        with connect_wismondb(self.wismon_db_file) as (conn, cursor):
            logger.info('Getting JSON message %s for date %s' % (name, date))
            row = cursor.execute(sql_json_get, (date, )).fetchone()
        if row:
            indices = {'monitor': 1, 'centres': 2, 'events': 3}
            try:
                msg = row[indices[name]]
            # Fixed: a bad name raises KeyError from the dict lookup; the
            # original 'except NameError' let it escape uncaught.
            except KeyError:
                raise WmError('No JSON message of name: %s' % name)
            return json.loads(msg)
        else:
            raise WmError('No JSON message for date: %s' % date)

    def json_del(self, date):
        """Delete the JSON messages stored for *date*.

        :raises WmError: if nothing was deleted.
        """
        with connect_wismondb(self.wismon_db_file) as (conn, cursor):
            logger.info('Deleting JSON message for date %s' % date)
            cursor.execute(sql_json_del, (date, ))
            count = cursor.rowcount
            conn.commit()
        if count == 0:
            raise WmError('No JSON messages for date: %s' % date)

    def event_add(self, startdatetime, enddatetime, title, text=''):
        """Record an event; datetimes must be ISO 8601 Zulu strings.

        :raises WmError: on malformed datetime input.
        """
        try:
            datetime.strptime(startdatetime, '%Y-%m-%dT%H:%M:%SZ')
            datetime.strptime(enddatetime, '%Y-%m-%dT%H:%M:%SZ')
        except ValueError:
            raise WmError(
                'Datetime format must conform to ISO 8601 (YYYY-MM-DDThh:mm:ssZ)'
            )

        with connect_wismondb(self.wismon_db_file) as (conn, cursor):
            logger.info('Adding event %s' % title)
            cursor.execute(sql_event_add,
                           (title, text, startdatetime, enddatetime))
            conn.commit()

    def event_get(self, dt=None):
        """Return the event rows active at *dt* (default: now, UTC).

        :raises WmError: if no event matches.
        """
        if dt is None:
            dt = datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ')
        with connect_wismondb(self.wismon_db_file) as (_, cursor):
            logger.info('Getting event for datetime %s' % dt)
            rows = cursor.execute(sql_event_get, (dt, )).fetchall()
        if rows:
            return rows
        else:
            raise WmError('No event for datetime %s' % dt)

    def event_del(self, eid):
        """Delete the event with id *eid*.

        :raises WmError: if no such event exists.
        """
        with connect_wismondb(self.wismon_db_file) as (conn, cursor):
            logger.info('Deleting event of id %s' % eid)
            cursor.execute(sql_event_del, (eid, ))
            count = cursor.rowcount
            conn.commit()
        if count == 0:
            raise WmError('No event of id: %s' % eid)

    def remarks_set(self, text):
        """Overwrite the stored remarks text."""
        with connect_wismondb(self.wismon_db_file) as (conn, cursor):
            logger.info('Setting new remarks')
            cursor.execute(sql_remarks_set, (text, ))
            conn.commit()

    def remarks_get(self):
        """Return the stored remarks text.

        :raises WmError: if no remarks are stored.
        """
        with connect_wismondb(self.wismon_db_file) as (conn, cursor):
            logger.info('Getting remarks')
            row = cursor.execute(sql_remarks_get).fetchone()
        if row is not None:
            return row[0]
        else:
            raise WmError('No remarks is found')

    def metadata_source_breakdown(self, cursor, date):
        """Run the per-source metadata breakdown for *date* when the
        analysis/metadata_source_breakdown option is enabled; silently
        skip if the section or option is absent."""
        try:
            if self.config.getboolean('analysis', 'metadata_source_breakdown'):
                logger.info('Calculating metadata source breakdown stats')
                qstr = sql_calc_md_source_breakdown.format(date)
                cursor.executescript(qstr)
        except (NoSectionError, NoOptionError):
            pass

    @staticmethod
    def init_working_directory(working_directory):
        """Create config/, data/JSON and logs/ under *working_directory*
        and write the default wismon.cfg from the built-in template."""
        config_dir = os.path.join(working_directory, 'config')
        if not os.path.exists(config_dir):
            os.makedirs(config_dir)
        with open(os.path.join(config_dir, 'wismon.cfg'), 'w') as outs:
            outs.write(CONFIG_TEMPLATE)

        json_dir = os.path.join(working_directory, 'data', 'JSON')
        if not os.path.exists(json_dir):
            os.makedirs(json_dir)
        log_dir = os.path.join(working_directory, 'logs')
        if not os.path.exists(log_dir):
            os.mkdir(log_dir)
コード例 #57
0
# Django settings for gruvdrift project.
from os import environ
from ConfigParser import ConfigParser

# Read the deployment-local settings, preferring the copy inside $GDHOME
# when that environment variable is set.
local_cfg = ConfigParser()
if 'GDHOME' in environ:  # Fixed: dict.has_key() is deprecated; `in` works on Py2 and Py3
    local_cfg.read("%s/settings-local.conf" % environ['GDHOME'])
else:
    local_cfg.read("settings-local.conf")

DEBUG = local_cfg.getboolean('devel', 'debug')
LDEVPATH = local_cfg.getboolean('devel', 'ldevpath')

#DEBUG = False
TEMPLATE_DEBUG = DEBUG
FORCE_SCRIPT_NAME = ""

ADMINS = (
    # ('Your Name', '*****@*****.**'),
)

MANAGERS = ADMINS
LOGIN_URL = "/auth/"

DATABASES = {
    'default': {
        # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
        'ENGINE': 'django.db.backends.%s' % local_cfg.get('db', 'type'),
        'NAME':
        local_cfg.get('db',
                      'name'),  # Or path to database file if using sqlite3.
コード例 #58
0
ファイル: setup_parser.py プロジェクト: jcao1022/tiflash
class TestSetup(object):
    """Class used for accessing various settings in test setup configuration
    file: setup.cfg
    """

    def __init__(self):
        self.cfg = ConfigParser(allow_no_value=True)
        self.cfg.optionxform = str  # preserve option-name case
        self.cfg.read("./setup.cfg")

    def get_ccs_prefix(self):
        """Returns the set ccs_prefix

        Returns:
            str: ccs_prefix variable set in setup.cfg
        """
        return self.cfg.get('environment', 'ccs_prefix')

    def get_ccs_versions(self):
        """Returns a tuple of CCS versions installed

        Returns:
            tuple: a tuple of version strings (as written in setup.cfg,
              whitespace-stripped) for the CCS versions installed in the
              test setup
        """
        versions = map(str.strip,
                       self.cfg.get('environment', 'ccs_versions').split(','))

        return tuple(versions)

    def get_ccs_installs(self):
        """Returns a tuple of all CCS install paths

        Returns:
            tuple: a tuple of strs being the full paths to each CCS
              installation

        Raises:
            TestSetupError: if any configured install path does not exist
        """
        # Fixed: removed unused locals (platform.system() result and the
        # versions list) and the redundant double tuple() conversion.
        ccs_paths = tuple(
            map(str.strip,
                self.cfg.get('environment', 'ccs_installs').split(',')))

        for path in ccs_paths:
            if not os.path.exists(path):
                raise TestSetupError("CCS Install: %s could not be found. "
                                     "Remove this ccs version from setup.cfg" %
                                     path)
        return ccs_paths

    def get_target_config_directory(self):
        """Returns the target configuration directory

        Returns:
            str: Path to target configuration directory

        Raises:
            TestSetupError: if the configured directory does not exist
        """

        ccxml_dir = self.cfg.get("environment", "ccxml_dir")

        if not os.path.exists(ccxml_dir):
            raise TestSetupError("Target Config Directory: %s could not"
                                 " be found." % ccxml_dir)
        return ccxml_dir

    def get_devices(self):
        """Returns a dict of devices with specified configurations (devices.cfg)

        Returns:
            dict: dict of device dicts in format:
                { devicename:
                    {
                        serno: SERNO,
                        connection: CONN,
                        devicetype: DEVTYPE
                    }
                }
        """
        devices = dict()

        # Only devices flagged true in the [devices] section are included;
        # each enabled device has its own section of options.
        device_list = [
            dev for dev in self.cfg.options('devices')
            if self.cfg.getboolean('devices', dev)
        ]

        for devname in device_list:
            dev = dict()
            options = self.cfg.options(devname)

            for o in options:
                dev[o] = self.cfg.get(devname, o)

            devices[devname] = dev

        return devices
コード例 #59
0
ファイル: stop_watcher.py プロジェクト: Seandals/stopwatcher
# Table/column layouts per supported scanner DB schema.
_SCAN_SCHEMAS = {
    "mad": {
        'db_stop_table': "pokestop",
        'db_stop_id': "pokestop_id",
        'db_stop_lat': "latitude",
        'db_stop_lon': "longitude",
        'db_stop_name': "name",
        'db_stop_img': "image",
        'db_gym_table': "gym",
        'db_gymdetails_table': "gymdetails",
        'db_gym_id': "gym_id",
        'db_gym_lat': "latitude",
        'db_gym_lon': "longitude",
        'db_gym_name': "name",
        'db_gym_img': "url",
    },
    "rdm": {
        'db_stop_table': "pokestop",
        'db_stop_id': "id",
        'db_stop_lat': "lat",
        'db_stop_lon': "lon",
        'db_stop_name': "name",
        'db_stop_img': "url",
        'db_gym_table': "gym",
        'db_gymdetails_table': "gym",
        'db_gym_id': "id",
        'db_gym_lat': "lat",
        'db_gym_lon': "lon",
        'db_gym_name': "name",
        'db_gym_img': "url",
    },
}

# Table/column layout per supported portal DB schema.
_PORTAL_SCHEMAS = {
    "pmsf": {
        'db_portal_table': "ingress_portals",
        'db_portal_id': "external_id",
        'db_portal_lat': "lat",
        'db_portal_lon': "lon",
        'db_portal_name': "name",
        'db_portal_img': "url",
    },
}

# Marker size index (0-3) -> provider-specific size keyword.
_MARKER_SIZES = {
    "google": {0: "tiny", 1: "small", 2: "mid", 3: "normal"},
    "osm": {1: "sm", 2: "md", 3: "lg"},
}


def _apply_db_schemas(config):
    """Fill in DB table/column names for the configured schemas.

    Unknown schema names leave the keys unset, matching the original
    if-chain behavior.
    """
    scan = _SCAN_SCHEMAS.get(config['db_scan_schema'])
    if scan is not None:
        config.update(scan)
    portal = _PORTAL_SCHEMAS.get(config['db_portal_schema'])
    if portal is not None:
        config.update(portal)


def _translate_marker_size(config):
    """Translate the numeric marker size into the provider's keyword.

    Prints a warning (and leaves the value unchanged) for combinations
    the provider does not support.
    """
    size = config['static_marker_size']
    provider = config['static_provider']
    if size not in (0, 1, 2, 3):
        print("Please choose another marker size.")
    elif provider == "osm" and size == 0:
        # OSM static maps have no size-0 marker.
        print("Please choose a marker size between 1 and 3")
    elif provider in _MARKER_SIZES and size in _MARKER_SIZES[provider]:
        config['static_marker_size'] = _MARKER_SIZES[provider][size]
    # Other providers keep the raw integer, as before.


def create_config(config_path):
    """Read the stop watcher settings into a flat dict.

    Defaults are read from DEFAULT_CONFIG first and then overridden by
    *config_path*, so the user file only needs to list changed options.

    Args:
        config_path (str): path to the user's .ini configuration file.

    Returns:
        dict: mapping of setting name -> parsed value.
    """
    config = dict()
    config_raw = ConfigParser()
    config_raw.read(DEFAULT_CONFIG)
    config_raw.read(config_path)

    ### Config
    config['send_stops'] = config_raw.getboolean('Config', 'STOPS')
    config['send_gyms'] = config_raw.getboolean('Config', 'GYMS')
    config['send_portals'] = config_raw.getboolean('Config', 'PORTALS')
    config['update_gym_stop'] = config_raw.getboolean(
        'Config', 'GYM_UPDATE_THROUGH_STOP')
    config['update_gym_portal'] = config_raw.getboolean(
        'Config', 'GYM_UPDATE_THROUGH_PORTAL')
    config['update_stop_portal'] = config_raw.getboolean(
        'Config', 'STOP_UPDATE_THROUGH_PORTAL')
    config['delete_stops'] = config_raw.getboolean('Config',
                                                   'DELETE_CONVERTED_STOP')
    config['dosleep'] = config_raw.getboolean('Config', 'LOOP')
    config['sleeptime'] = config_raw.getint('Config', 'SECONDS_BETWEEN_LOOPS')
    # str.split already returns a list; the original wrapped it in a
    # redundant list() call.
    config['bbox'] = config_raw.get('Config', 'BBOX').split(',')
    config['language'] = config_raw.get('Config', 'LANGUAGE')

    ### Discord
    config['webhook_url_stop'] = config_raw.get('Discord', 'STOP_WEBHOOK')
    config['webhook_url_gym'] = config_raw.get('Discord', 'GYM_WEBHOOK')
    config['webhook_url_portal'] = config_raw.get('Discord', 'PORTAL_WEBHOOK')
    config['stop_img'] = config_raw.get('Discord', 'STOP_IMAGE')
    config['embed_stop_color'] = config_raw.getint('Discord', 'STOP_COLOR')
    config['gym_img'] = config_raw.get('Discord', 'GYM_IMAGE')
    config['embed_gym_color'] = config_raw.getint('Discord', 'GYM_COLOR')
    config['portal_img'] = config_raw.get('Discord', 'PORTAL_IMAGE')
    config['embed_portal_color'] = config_raw.getint('Discord', 'PORTAL_COLOR')

    ### Static Map
    config['static_provider'] = config_raw.get('Static Map', 'PROVIDER')
    config['imgur_all'] = config_raw.getboolean(
        'Static Map', 'USE_IMGUR_MIRRORS_FOR_EVERYTHING')
    config['static_fancy'] = config_raw.getboolean('Static Map',
                                                   'SUPER_FANCY_STATIC_MAPS')
    config['client_id_imgur'] = config_raw.get('Static Map', 'IMGUR_CLIENT_ID')
    config['marker_limit'] = config_raw.getint('Static Map', 'MARKER_LIMIT')
    config['static_key'] = config_raw.get('Static Map', 'KEY')
    config['static_zoom'] = config_raw.getint('Static Map', 'ZOOM')
    config['static_width'] = config_raw.getint('Static Map', 'WIDTH')
    config['static_height'] = config_raw.getint('Static Map', 'HEIGHT')
    config['static_marker_size'] = config_raw.getint('Static Map',
                                                     'MARKER_SIZE')
    config['static_marker_color_stop'] = config_raw.get(
        'Static Map', 'MARKER_COLOR_STOP')
    config['static_marker_color_gym'] = config_raw.get('Static Map',
                                                       'MARKER_COLOR_GYM')
    config['static_marker_color_portal'] = config_raw.get(
        'Static Map', 'MARKER_COLOR_PORTAL')
    config['static_selfhosted_url'] = config_raw.get('Static Map',
                                                     'TILESERVER_URL')
    config['geocoding'] = config_raw.getboolean('Static Map', 'USE_GEOCODING')

    ### DATABASE
    config['db_scan_schema'] = config_raw.get('DB', 'SCANNER_DB_SCHEMA')
    config['db_portal_schema'] = config_raw.get('DB', 'PORTAL_DB_SCHEMA')
    config['db_host'] = config_raw.get('DB', 'HOST')
    config['db_port'] = config_raw.getint('DB', 'PORT')
    config['db_user'] = config_raw.get('DB', 'USER')
    config['db_pass'] = config_raw.get('DB', 'PASSWORD')
    config['db_portal_dbname'] = config_raw.get('DB', 'PORTAL_DB_NAME')
    config['db_dbname'] = config_raw.get('DB', 'SCANNER_DB_NAME')

    _apply_db_schemas(config)
    _translate_marker_size(config)

    return config
コード例 #60
0
ファイル: IniFile.py プロジェクト: wuha5086/pyllk
 def getboolean(self, sec, option, default=0):
     """Return the boolean value of *option* in *sec*.

     Falls back to *default* when the option is not present instead of
     raising like the base class does.
     """
     if not self.has_option(sec, option):
         return default
     return ConfigParser.getboolean(self, sec, option)