Пример #1
0
    def load_config_file(self):
        """Populate self._config from the INI file at self.CONFIG_FILE."""
        parser = SafeConfigParser()
        parser.read(self.CONFIG_FILE)

        # Handlers package (optional section).
        if parser.has_section('handlers'):
            self._config['handlers_package'] = parser.get('handlers', 'package')

        # OAuth credentials: copy each key verbatim.
        if parser.has_section('auth'):
            for key in ('consumer_key', 'consumer_secret',
                        'token_key', 'token_secret'):
                self._config[key] = parser.get('auth', key)

        # Stream mode defaults to the non-user stream when unconfigured.
        if parser.has_section('stream'):
            self._config['user_stream'] = (
                parser.get('stream', 'user_stream').lower() == 'true')
        else:
            self._config['user_stream'] = False

        # Optional error-throttling knobs; values are kept as raw strings.
        for opt in ('min_seconds_between_errors',
                    'sleep_seconds_on_consecutive_errors'):
            if parser.has_option('general', opt):
                self._config[opt] = parser.get('general', opt)
Пример #2
0
	def read_conf_deepsearch(self):
		"""Load deep-search provider definitions from custom_params.ini.

		Populates self.cfg_deep with one dict per configured provider;
		sets self.cfg_deep to None when parsing fails.
		"""
		self.cfg_deep = []
		parser = SafeConfigParser()
		parser.read(self.dirconf + 'custom_params.ini')

		if not parser.has_option('general', 'deep_numserver'):
			return None

		numserver = parser.get('general', 'deep_numserver')

		try:
			# `xrange` replaced with `range` (works on Python 2 and 3 alike).
			for i in range(int(numserver)):
				section = 'deep_search_provider%d' % (i + 1)

				# Speed class: -1 from the helper means "unknown"; default to 1.
				spc = self.get_conf_speedopt(parser, i, 'd')
				if spc == -1:
					spc = 1

				# Provider type is optional and defaults to 'DSN'.
				if parser.has_option(section, 'type'):
					typeds = parser.get(section, 'type')
				else:
					typeds = 'DSN'

				self.cfg_deep.append({
					'url': parser.get(section, 'url'),
					'user': parser.get(section, 'user'),
					'pwd': parser.get(section, 'pwd'),
					'type': typeds,
					'speed_class': spc,
					'valid': parser.getint(section, 'valid'),
				})

		except Exception as e:
			# BUG FIX: was `print str(e)` (Python-2-only syntax) and
			# `cfg_deep = None`, which bound a *local* and left
			# self.cfg_deep holding a partial list.
			print(str(e))
			self.cfg_deep = None
Пример #3
0
    def testDatabaseContent(self):
        """Dump the database to a config file and verify the expected entries."""
        output = os.path.abspath('script.out')
        self.gnathub.run(script=Script.db2cfg(), output=output)

        parser = SafeConfigParser()
        parser.optionxform = str  # keep option names case-sensitive
        parser.read(output)

        found = parser.sections()
        for section in SECTIONS:
            self.assertTrue(section in found, "Missing section: " + section)

        # The external call entry must exist and carry the expected message.
        self.assertTrue(parser.has_option(TEXT_IO[0], EXTERNAL_CALL[0]),
                        "Should be an external call")
        self.assertEqual(parser.get(TEXT_IO[0], EXTERNAL_CALL[0]),
                         EXTERNAL_CALL[1],
                         'Wrong message')

        # The entry point is checked by suffix only.
        self.assertTrue(parser.has_option(B_SIMPLE_ADB[0], ENTRY_POINT[0]),
                        "Entry point not found")
        entry = parser.get(B_SIMPLE_ADB[0], ENTRY_POINT[0])
        self.assertTrue(entry.endswith(ENTRY_POINT[1]),
                        'unexpected value for the entry point')

        # Every expected metric must be present.
        for metric in METRICS_SIMPLE_4_1:
            self.assertTrue(parser.has_option(SIMPLE_4_1[0], metric),
                            'missing entry for "%s"' % metric)
Пример #4
0
	def initialize(globalConfigPath, interactive=True):
		"""Load global and per-user configuration and prime RestHandler's class state.

		Reads the API URL and user-config location from the global config,
		ensures a [labcli] user entry exists, and negotiates an API key
		interactively when none is stored.
		"""
		RestHandler.interactive = interactive

		# Global configuration drives the API endpoint and user-config location.
		config = SafeConfigParser()
		config.read(globalConfigPath)
		RestHandler._apiurl = config.get("labcli", "apiurl").rstrip("/")
		RestHandler._install_rest_opener()
		RestHandler._userConfigPath = os.path.expanduser(config.get("labcli", "userconf"))

		# Per-user configuration (the file may not exist yet).
		userConfig = SafeConfigParser()
		userConfig.read(RestHandler._userConfigPath)

		RestHandler._globalConf = config
		RestHandler._userConf = userConfig

		if not userConfig.has_section("labcli"):
			userConfig.add_section("labcli")

		# Fall back to the current OS user when none is configured.
		if not userConfig.has_option("labcli", "user"):
			currentUser = subprocess.check_output("whoami", shell=True).strip()
			userConfig.set("labcli", "user", currentUser)
		RestHandler.labUser = userConfig.get("labcli", "user")

		if userConfig.has_option("labcli", "apikey"):
			RestHandler._apikey = userConfig.get("labcli", "apikey")
		elif interactive:
			# No key stored for this user; negotiate one interactively.
			print("No apikey present for this user, fetching key...", file=sys.stderr)
			RestHandler._negotiateApiKey()
Пример #5
0
def storage_init(conf, **kw):
    """
    S3 implementation of the storage_init API call.
    Do one-time global setup: read our S3 API tokens and bucket name.
    Return True on success
    Return False on error 
    """
    global AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, AWS_BUCKET, AWS_COMPRESS

    config_path = conf['path']
    if os.path.exists(config_path):

        parser = SafeConfigParser()

        try:
            parser.read(config_path)
        except Exception as e:  # BUG FIX: `except Exception, e` is Python-2-only syntax
            log.exception(e)
            return False

        if parser.has_section('s3'):

            if parser.has_option('s3', 'bucket'):
                AWS_BUCKET = parser.get('s3', 'bucket')

            if parser.has_option('s3', 'api_key_id'):
                AWS_ACCESS_KEY_ID = parser.get('s3', 'api_key_id')

            if parser.has_option('s3', 'api_key_secret'):
                AWS_SECRET_ACCESS_KEY = parser.get('s3', 'api_key_secret')

            if parser.has_option('s3', 'compress'):
                # BUG FIX: get()'s third positional argument is *raw*, not a
                # default; the has_option() guard makes a fallback unnecessary.
                AWS_COMPRESS = (parser.get('s3', 'compress').lower() in ['true', '1'])

    # BUG FIX: the function previously fell off the end returning None,
    # contradicting the documented "Return True on success".
    return True
Пример #6
0
    def buildTestStruct(self, path=None):
        """
        A list of dictionaries is used so that order is preserved.
        [
            {'variant0': {'path': ['test0', 'test1', 'test2',...]}},
            {'variant1': {'path': ['test0', 'test1', 'test2',...]}},
            {'variant2': {'path': ['test0', 'test1', 'test2',...]}},
        ]
        """
        self.getCfgs(path=path)
        if not self.cfg_files:
            return
        for config in self.cfg_files:
            cfg = SafeConfigParser()
            cfg.read(config)

            # NOTE(review): proj_root is read but never used below.
            if cfg.has_option('DEFAULT', 'PROJ_ROOT'):
                proj_root = cfg.get('DEFAULT', 'PROJ_ROOT')
            else:
                proj_root = ''

            path = os.path.normpath(os.path.split(config)[0])

            # A user-supplied variant name wins; otherwise fall back to the
            # directory name (the CWD's basename when the path is '.').
            if cfg.has_option('DEFAULT', 'VARIANT_NAME'):
                variant_name = cfg.get('DEFAULT', 'VARIANT_NAME')
            elif path == '.':
                variant_name = os.path.normpath(os.path.split(os.getcwd())[1])
            else:
                variant_name = path

            tests = sorted(cfg.sections())
            self.variants_and_tests.append({variant_name: {path: tests}})
Пример #7
0
def generate_config_file(input_filename, output_filename, config_items):
    '''
    Generate a config file with the configuration that has been defined for the embedded web application.
    This is mostly relevant when setting metadata externally, since the script for doing that does not
    have access to app.config.

    :param input_filename: template .ini file to start from
    :param output_filename: where the merged configuration is written
    :param config_items: iterable of (label, value) pairs to apply
    '''
    cp = SafeConfigParser()
    cp.read(input_filename)
    config_items_by_section = []
    for label, value in config_items:
        found = False
        # Attempt to determine the correct section for this configuration option.
        for section in cp.sections():
            if cp.has_option(section, label):
                config_items_by_section.append((section, label, value))
                found = True
                # BUG FIX: was `continue`, which kept scanning and duplicated
                # the option into every later section that also contained it.
                break
        # Default to app:main if no section was found.
        if not found:
            config_items_by_section.append(('app:main', label, value))
    print(config_items_by_section)

    # Replace the default values with the provided configuration.
    for section, label, value in config_items_by_section:
        if cp.has_option(section, label):
            cp.remove_option(section, label)
        cp.set(section, label, str(value))
    # Context manager guarantees the file is closed even if write() fails.
    with open(output_filename, 'w') as fh:
        cp.write(fh)
Пример #8
0
def add_gallery_post(generator):
    """Attach album name and image list (with alt/title captions) to every
    article that declares a 'gallery' entry in its metadata."""

    contentpath = generator.settings.get('PATH')
    gallerycontentpath = os.path.join(contentpath, 'images/gallery')

    for article in generator.articles:
        if 'gallery' not in article.metadata.keys():
            continue
        album = article.metadata.get('gallery')
        galleryimages = []

        albumpath = os.path.join(gallerycontentpath, album)

        # BUG FIX: a single parser instance was shared across all articles,
        # so caption sections read for earlier albums leaked into later ones.
        parser = SafeConfigParser()

        if os.path.isdir(albumpath):
            texts = os.path.join(albumpath, album + ".txt")
            if os.path.isfile(texts):
                # todo: use user defined encoding
                parser.readfp(codecs.open(texts, "r", "utf8"))

            for i in os.listdir(albumpath):
                if not os.path.isfile(os.path.join(albumpath, i)):
                    continue
                iName, iExt = os.path.splitext(i)
                if iExt.lower() in IMAGE_EXTENSIONS:
                    # Captions default to a single space when not configured.
                    alt = parser.get(iName, "alt") if parser.has_option(iName, "alt") else " "
                    title = parser.get(iName, "title") if parser.has_option(iName, "title") else " "
                    galleryimages.append(galleryimage(i, alt, title))

        article.album = album
        article.galleryimages = sorted(galleryimages, key=lambda img: img.src)
Пример #9
0
def default_chaincom_opts(config_file=None):
    """
    Get our default chain.com options from a config file.

    :param config_file: path to the config file; defaults to virtualchain's.
    :return: dict with 'utxo_provider' plus any api_key_id / api_key_secret found.
    """
    if config_file is None:
        config_file = virtualchain.get_config_filename()

    parser = SafeConfigParser()
    parser.read(config_file)

    api_key_id = None
    api_key_secret = None

    if parser.has_section("chain_com"):

        if parser.has_option("chain_com", "api_key_id"):
            api_key_id = parser.get("chain_com", "api_key_id")

        if parser.has_option("chain_com", "api_key_secret"):
            api_key_secret = parser.get("chain_com", "api_key_secret")

    chaincom_opts = {"utxo_provider": "chain_com", "api_key_id": api_key_id, "api_key_secret": api_key_secret}

    # Strip Nones.  BUG FIX: the original deleted keys from the dict while
    # iterating items(), which raises RuntimeError on Python 3; build a
    # filtered dict instead.
    return {k: v for (k, v) in chaincom_opts.items() if v is not None}
Пример #10
0
    def __init__(self,section='Settings',iniFile='vikatan.ini'):
        """Build the downloader: browser, user profile from *iniFile*, magazine code.

        Reads username/password/magazine from [Settings] (or *section*);
        falls back to defaults when the file or entries are missing.
        """
        self._browser = Browser()
        self._profile = dict()
        consoleLogger()
        # Create Temp Dir [for images] and set it as default tempdir
        # NOTE(review): the bare `except` below hides real errors; a missing
        # file parses as empty, so it mainly catches import/parse failures.
        try:
            from ConfigParser import SafeConfigParser
            parser = SafeConfigParser()
            parser.read(iniFile)

            if parser.has_section(section):

                if parser.has_option(section,'username'):
                    self._profile['username']=parser.get(section,'username')

                if parser.has_option(section,'password'):
                    self._profile['password']=parser.get(section,'password')

                if parser.has_option(section,'magazine'):
                    mcode=parser.get(section,'magazine')

        except:
            print 'Invalid Settings. Mode set to Default'

        # If no magazine code was read above, the bare `mcode` reference below
        # raises NameError; that NameError is (ab)used as the "not configured"
        # signal handled in the except clause.
        try:
            mcode
            if mcode in self._magazines.keys(): self._mcode = mcode
            else: print self._magCodeErrorStr
        except NameError:
            print 'No Parameter found in %s. Assuming Default' % iniFile

        self.status()
def storage_init(conf, **kw):
    """
    Local disk implementation of the storage_init API call.
    Do one-time global setup--i.e. make directories.
    Return True on success
    Return False on error 
    """
    global DISK_ROOT, MUTABLE_STORAGE_ROOT, IMMUTABLE_STORAGE_ROOT

    config_path = conf['path']
    if os.path.exists(config_path):

        parser = SafeConfigParser()

        try:
            parser.read(config_path)
        except Exception as e:  # BUG FIX: `except Exception, e` is Python-2-only syntax
            log.exception(e)
            return False

        if parser.has_section('disk'):

            if parser.has_option('disk', 'root'):
                DISK_ROOT = parser.get('disk', 'root')

            if parser.has_option('disk', 'immutable'):
                IMMUTABLE_STORAGE_ROOT = parser.get('disk', 'immutable')

            if parser.has_option('disk', 'mutable'):
                MUTABLE_STORAGE_ROOT = parser.get('disk', 'mutable')

    # BUG FIX: previously fell off the end returning None despite the
    # docstring's "Return True on success".  (Indentation also normalized
    # from the original's mixed 3/7-space layout.)
    return True
Пример #12
0
class ConfManager():
    """Common base class for all configuration manager"""

    def __init__(self, platform):
        """Initialize parser object"""
        print('Configuring CloudTUI components...')
        self.__platform = platform
        self.parser = SafeConfigParser()
        self.parser.read('conf/conf_files/' + self.__platform + '.conf')

    def read(self):
        """Read configuration file"""
        self.read_options()

    def read_login_data(self):
        """Read login data from login.txt file"""
        pass

    def read_monitor_data(self):
        """Read monitor configuration data from login.txt file"""
        # Copy each [monitor] option onto a same-named monitor_* attribute.
        for attr, option in (('monitor_port', 'port'),
                             ('monitor_host', 'host'),
                             ('monitor_enabled', 'enabled')):
            setattr(self, attr, self.parser.get('monitor', option))

    def read_options(self):
        """Read options values from [option] section"""
        if self.parser.has_option('options', 'validate_certs'):
            self.validate_certs = self.parser.get('options', 'validate_certs')
        self.terminal = (self.parser.get('options', 'terminal')
                         if self.parser.has_option('options', 'terminal')
                         else "default")
Пример #13
0
def storage_init():
    """
    S3 implementation of the storage_init API call.
    Do one-time global setup: read our S3 API tokens and bucket name.
    Return True on success
    Return False on error 
    """
    global AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, AWS_BUCKET

    if os.path.exists(CONFIG_PATH):

        parser = SafeConfigParser()

        try:
            parser.read(CONFIG_PATH)
        except Exception as e:  # BUG FIX: `except Exception, e` is Python-2-only syntax
            log.exception(e)
            return False

        if parser.has_section('s3'):

            if parser.has_option('s3', 'bucket'):
                AWS_BUCKET = parser.get('s3', 'bucket')

            if parser.has_option('s3', 'api_key_id'):
                AWS_ACCESS_KEY_ID = parser.get('s3', 'api_key_id')

            if parser.has_option('s3', 'api_key_secret'):
                AWS_SECRET_ACCESS_KEY = parser.get('s3', 'api_key_secret')

    # BUG FIX: previously fell off the end returning None despite the
    # docstring's "Return True on success".
    return True
Пример #14
0
def getLayoutsFromManifest(fp, format, directory_name):
    """Parse a manifest stream and return {id: data} for every section that
    matches the given resource format (variants excluded)."""
    parser = SafeConfigParser(None, multidict)
    parser.readfp(fp)

    layouts = {}
    for section in parser.sections():
        # Only plain sections of the requested resource type are considered.
        if not section.startswith(format.resourceType) or ':variants' in section:
            continue
        # id is a combination of directory name + filename
        filename = parser.get(section, 'file') if parser.has_option(section, 'file') else ''
        _id = directory_name + '/' + filename
        if _id in layouts:
            # because TTW resources are created first, we consider layouts
            # with same id in a TTW to be taken before other resources
            continue
        data = {'directory': directory_name}
        # Copy the known keys, falling back to the format's defaults.
        for key in format.keys:
            data[key] = (parser.get(section, key)
                         if parser.has_option(section, key)
                         else format.defaults.get(key, None))
        layouts[_id] = data

    return layouts
Пример #15
0
def run():
    """Get the arguments and parse the config file. Activate console. Get servers from the config file
    or from arguments. Show the query screen."""
    arguments = parseArguments()
    config = SafeConfigParser({'username': arguments.username, 'password': arguments.password})
    config.read(arguments.conf)
    servers = commonServers(config, arguments)

    # For each feature choice, keep the servers that enable it
    # (an absent option counts as enabled).
    chosenServers = {}
    for choice in choices:
        if not config.sections():
            chosenServers[choice] = servers
            continue
        enabled = []
        for server in servers:
            key = str(server)
            if not config.has_option(key, choice) or config.getboolean(key, choice):
                enabled.append(server)
        chosenServers[choice] = enabled

    # Global config params
    params = {}
    if config.has_option('DEFAULT', 'ignoreDbs'):
        params['ignoreDbs'] = config.get('DEFAULT', 'ignoreDbs').split(',')
    if config.has_option('DEFAULT', 'minSecsRunning'):
        params['minSecsRunning'] = int(config.get('DEFAULT', 'minSecsRunning'))

    with Console() as console:
        queryScreen = QueryScreen(console, chosenServers, autoKillSeconds=arguments.autoKillSeconds, params=params)
        try:
            queryScreen.action()
        except KeyboardInterrupt:
            pass
Пример #16
0
def fetch(version=''):
   """Mirror one ESET update channel: rewrite update.ver to point at our host
   and download any '-sta' files whose build number is new (tracked in the
   `eset` database table)."""
   # Download update.ver archive
   urlretrieve('http://update.eset.com/eset_upd/' + version + '/update.ver', '/tmp/' + version + '_update.ver.rar')

   # Extract update.ver
   RarFile('/tmp/' + version + '_update.ver.rar').extract('update.ver', path='/tmp/' + version)

   # Load update.ver
   config = SafeConfigParser()
   config.read('/tmp/' + version + '/update.ver')

   # Remove original host and expire section.
   config.remove_section('HOSTS')
   config.remove_section('Expire')

   # Force use my host
   config.add_section('HOSTS')
   config.set('HOSTS', 'Other', '200@http://WE_CLOUD/eset_upd/' + version)

   # Only fetch en-US and zh-CN
   # (1033 and 2052 are the Windows locale IDs for en-US and zh-CN)
   for section in config.sections():
      if config.has_option(section, 'language'):
         if config.getint(section, 'language') != 1033 and config.getint(section, 'language') != 2052:
            config.remove_section(section)

   # Save update.ver
   with open(out_dir + 'eset_upd/' + version + '/update.ver', 'w') as ver_file:
      config.write(ver_file)

   # Process each file
   for section in config.sections():
      if config.has_option(section, 'file'):
         filename = config.get(section, 'file')
         # Relative names are anchored under this channel's directory.
         if filename[:1] != '/':
            filename = '/eset_upd/' + version + '/' + filename

         # Skip everything except '-sta' component directories.
         # NOTE: find/rfind are the free functions from the Python 2 `string`
         # module (imported elsewhere in this file).
         dir = filename[1:find(filename, '/', 1)]
         if dir[rfind(dir, '-') + 1:] != 'sta':
            continue

         if config.has_option(section, 'size'):
            file_size = config.getint(section, 'size')
         else:
            file_size = -1

         if config.has_option(section, 'build'):
            file_build = config.getint(section, 'build')

            # Fetch only when this (file, build) pair is not yet recorded.
            # NOTE(review): SQL built by string concatenation -- `filename`
            # comes from the downloaded update.ver, so this is injectable;
            # should use parameterized queries.
            cursor.execute("SELECT * FROM eset WHERE file = '" + filename + "' AND build = " + str(file_build))
            if not cursor.rowcount:
               process(filename[1:], file_size)

               # Update the build record, inserting on first sight.
               cursor.execute("UPDATE eset SET build = " + str(file_build) + " WHERE file = '" + filename + "'")
               if not cursor.rowcount:
                  cursor.execute("INSERT INTO eset (file, build) VALUES ('" + filename + "', " + str(file_build) + ")")

               db.commit()
         else:
            # No build metadata: always fetch.
            process(filename[1:], file_size)
Пример #17
0
class ConfigBasedTests(unittest.TestCase):
    """Base TestCase that loads settings (logging level, proxies) from a config file."""

    def setUp(self):
        self.log = logging.getLogger("Fourchapy.tests.ConfigBase.%s" % type(self).__name__)

        self.log.debug("Loading config")
        self.cfg = SafeConfigParser()
        self.log.debug("Reading config file")
        self.cfg.read(DEFAULT_CONFIG_FILE)

        self.log.debug("Setting up logging")
        if not self.cfg.has_section('Global'):
            self.cfg.add_section('Global')
        # Global config - Logging level
        if not self.cfg.has_option('Global', 'loggingLevel'):
            self.cfg.set('Global', 'loggingLevel', str(DEFAULT_LOGGING_LEVEL))
        self.log.setLevel(self.cfg.getint('Global', 'loggingLevel'))
        # Global config - proxies
        self.proxy = {}
        self.proxy['http'] = self._get_option(
                                              section = 'Global',
                                              option = 'proxy_http',
                                              default = None,
                                              vtype = 'str',
                                              )
# Doesn't actually work atm - urllib doesn't support using
# https and a proxy at the same time. 
#        self.proxy['https'] = self._get_option(
#                                              section = 'Global',
#                                              option = 'proxy_https',
#                                              default = None,
#                                              vtype = 'str',
#                                              )
        # BUG FIX: the original deleted keys from self.proxy while iterating
        # items(), which raises RuntimeError on Python 3; filter into a
        # fresh dict instead.
        self.proxy = dict((k, v) for k, v in self.proxy.items() if v is not None)
        self.log.debug("Set proxy to %r", self.proxy)

    def _get_option(self, section, option, default, vtype = 'str'):
        """ Get the option and set it if it doesn't exist.

        :param section: config section name (created when missing)
        :param option: option name (seeded with str(default) when missing)
        :param default: value stored when the option is absent
        :param vtype: ConfigParser getter suffix ('str', 'int', 'boolean', ...)
        :return: the option value converted via the selected getter
        """
        self.log.debug("Going to get/set %r.%r of type %r, default %r", section, option, vtype, default)
        if not self.cfg.has_section(section):
            self.cfg.add_section(section)
            self.log.debug("Added section %r to config", section)
        if not self.cfg.has_option(section, option):
            self.cfg.set(section, option, str(default))
            self.log.debug("Added option %r.%r to config with value %r", section, option, default)
        if vtype == 'str' or vtype is None:
            ret = self.cfg.get(section, option)
        else:
            attr = "get%s" % vtype
            assert hasattr(self.cfg, attr), "Entry type %r doesn't exist (aka ConfigObj.%r)" % (vtype, attr)
            ret = getattr(self.cfg, attr)(section, option)
        self.log.debug("%r.%r=%r", section, option, ret)
        return ret
Пример #18
0
def createGenericServerCls(path, filename, conf):
    """Create a ServerProcess class representing a generic server.

    Options for this server are passed in as a string in standard
    .ini format.  We use a string rather than a file to allow this
    configuration to be extracted from a larger file if necessary.
    """
    class cls(ServerProcess):
        pass

    scp = SafeConfigParser()
    scp.readfp(StringIO.StringIO(conf))

    # general information
    cls.name = scp.get('info', 'name', raw=True)
    cls.__doc__ = scp.get('info', 'description', raw=True)
    if scp.has_option('info', 'version'):
        cls.version = scp.get('info', 'version', raw=True)
    else:
        cls.version = '0.0'
    try:
        cls.instancename = scp.get('info', 'instancename', raw=True)
    except Exception:  # FIX: narrowed from bare `except:` (no longer eats KeyboardInterrupt)
        cls.instancename = cls.name
    cls.environVars = findEnvironmentVars(cls.instancename)
    cls.isLocal = len(cls.environVars) > 0

    # startup
    platform_cmdline_option = 'cmdline_{}'.format(sys.platform)
    if scp.has_option('startup', platform_cmdline_option):
        # use platform-specific command line
        cls.cmdline = scp.get('startup', platform_cmdline_option, raw=True)
    else:
        # use generic command line
        cls.cmdline = scp.get('startup', 'cmdline', raw=True)
    cls.path = path
    cls.filename = filename
    try:
        # BUG FIX: getint() rejected fractional timeouts such as "2.5";
        # getfloat() accepts integral and fractional values alike.
        cls.timeout = scp.getfloat('startup', 'timeout')
    except Exception:  # timeout is optional
        pass

    # shutdown
    if scp.has_option('shutdown', 'message'):
        cls.shutdownMode = 'message', int(scp.get('shutdown', 'message', raw=True))
    elif scp.has_option('shutdown', 'setting'):
        cls.shutdownMode = 'setting', scp.get('shutdown', 'setting', raw=True)
    try:
        # Same getint -> getfloat fix as for the startup timeout.
        cls.shutdownTimeout = scp.getfloat('shutdown', 'timeout')
    except Exception:  # shutdown timeout is optional
        pass

    return cls
Пример #19
0
def clixxIOProjectAutostarts(showDisabled=False):
    """
    Return the names of all projects maintained by the system.

    These are typically directories stored in the IoT directory

    :param showDisabled: also include projects whose autostart is not enabled
    :return: dict mapping project name -> autostart settings dict
    """
    autostarts = {}

    for p in clixxIOListProjects():

        cp = SafeConfigParser()
        cp.read(clixxIOlProjectConfigFilename(p))

        # Defaults used when the [autostart] section is absent or incomplete.
        ad = clixxIOProjectDir(p)
        ac = ""
        aa = ""
        ae = ""

        if cp.has_option("autostart", "directory"):
            # BUG FIX: cp.get(section, option, "") passed "" as the *raw*
            # positional argument (and is a TypeError on Python 3); the
            # has_option() guard makes a fallback unnecessary.
            ad = cp.get("autostart", "directory")

        if cp.has_option("autostart", "command"):
            ac = cp.get("autostart", "command")

        if cp.has_option("autostart", "arguments"):
            aa = cp.get("autostart", "arguments")

        if cp.has_option("autostart", "enabled"):
            ae = cp.get("autostart", "enabled")

        # Record the project when disabled entries are requested or it is
        # enabled (previously duplicated as two identical dict literals).
        if showDisabled or ae.lower() == "true":
            autostarts[p] = {
                "directory": ad,
                "command": ac,
                "arguments": aa,
                "enabled": ae,
            }

    return autostarts
Пример #20
0
def _import_manifest(filename, cluster):
    """handle the uploaded module-file, return True on succes, else False"""
    # `filename` is an open file object holding a gzipped tar that contains:
    # `control` (INI metadata), `description.txt`, and the CI files it lists.
    temp_path = "/tmp/"  # NOTE(review): unused
    with tarfile.open(fileobj=filename, mode="r:gz") as mytar:
        control_file = mytar.extractfile("control")
        parser = SafeConfigParser()
        parser.readfp(control_file)
        #        if cluster.domain is not None:
        #            if cluster.domain == parser.get("DEFAULT", "host"):
        #                return False
        doc_file = mytar.extractfile("description.txt")
        desc = doc_file.read()
        # The 'ci' entry lists component-interface file names; remember each
        # one as (name, contents, hash).
        ci_names = _split_ciline(parser.get("DEFAULT", "ci"))
        interfaces = []
        for ci_name in ci_names:
            ci_file = mytar.extractfile(ci_name)
            ci = ci_file.read()
            ci_hash = _hash_file(mytar.extractfile(ci_name))
            interfaces.append((ci_name, ci, ci_hash))
        name = parser.get("DEFAULT", "name")
        exe_hash = parser.get("DEFAULT", "exe_hash")
        path = parser.get("DEFAULT", "exe")  # NOTE(review): read but unused below
        version = "1.0"
        if parser.has_option("DEFAULT", "version"):
            version = parser.get("DEFAULT", "version")
        # Upsert the component, keyed by its executable hash.
        if Components.objects.filter(exe_hash=exe_hash):
            component = Components.objects.get(exe_hash=exe_hash)
            component.description = desc
            component.version = version
        else:
            component = Components(description=desc, is_active=False, version=version, exe_hash=exe_hash)
        component.save()
        comp_cluster = Components_Cluster(component=component, cluster=cluster, name=name)
        comp_cluster.save()
        from os.path import splitext, basename

        # Register every interface (by hash) and link it to the component.
        for ci_name, ci, ci_hash in interfaces:
            interface_name = splitext(basename(ci_name))[0]
            interface, created = Interfaces.objects.get_or_create(ci_hash=ci_hash)
            if created:
                interface.name = interface_name
                interface.ci = ci
            interface.save()
            inter_comp = Interfaces_Components(interface=interface, component=component)
            inter_comp.save()
        # Optional space-separated author list -> one Programmer row each.
        if parser.has_option("DEFAULT", "author"):
            authors = parser.get("DEFAULT", "author").split(" ")
            for author in authors:
                programmer = Programmer(component=component, email=author)
                programmer.save()
        return True
    # NOTE(review): unreachable -- the `return True` above always exits the
    # with-block first, so the documented False case never happens.
    return False
Пример #21
0
class Config(object):
    """Thin wrapper around SafeConfigParser that bootstraps a default config
    file (and exits) when none can be read."""

    def __init__(self, path='', name=''):
        """Read the config at path/name (or path/DEFAULT_CFG when name is empty)."""
        if name:
            cfg_file = os.path.join(path, name)
        else:
            cfg_file = os.path.join(path, DEFAULT_CFG)
        self.parser = SafeConfigParser()
        if not self.parser.read(cfg_file):
            # BUG FIX: `print 'x', y` statements are Python-2-only syntax.
            print('No configuration file found:', cfg_file)
            self.new_cfg()

    def new_cfg(self):
        """Write a default config to the CWD and exit so the user can review it."""
        self.section_gen()
        self.section_db()
        with open(DEFAULT_CFG, 'w') as new_cfg:
            print('Creating new config file in CWD:', DEFAULT_CFG)
            print('Please double check the default values before running again:')
            print(self)
            self.parser.write(new_cfg)
        sys.exit(0)

    def section_gen(self):
        """Populate the [general] section with defaults."""
        sec = 'general'
        self.parser.add_section(sec)
        self.parser.set(sec, 'output', 'sqlite3 #default')
        self.parser.set(sec, '#output', 'stdout')

    def section_db(self):
        """Populate the [database] section with defaults."""
        sec = 'database'
        self.parser.add_section(sec)
        self.parser.set(sec, 'path', os.getcwd())
        self.parser.set(sec, 'user', 'frankenstein')
        self.parser.set(sec, 'pw', 'PuttinOnTheRitz')
        self.parser.set(sec, 'db', 'frankenstein.sqlite')

    def setting(self, section='', option=''):
        """Return the option's value, searching every section when no section
        is given; None when not found."""
        if not section:
            for s in self.parser.sections():
                if self.parser.has_option(s, option):
                    return self.parser.get(s, option)
        elif self.parser.has_option(section, option):
            return self.parser.get(section, option)
        else:
            return None

    def __str__(self):
        rv = ''
        for sect in self.parser.sections():
            rv += 'Section: %s\n' % sect
            for opt in self.parser.options(sect):
                rv += '\t%s\t=\t%s\n' % (opt, self.parser.get(sect, opt))
        return rv
def storage_init(conf, **kw):
    """
    Set up and load storage.

    Reads the 'onedrive' section of the config file named by conf['path'],
    populating the module-level connection settings (folder name, client
    credentials, redirect URL, compression flag).  Returns False on any
    configuration error.
    """
    global ONEDRIVE_FOLDER_NAME, ONEDRIVE_FOLDER_ID, ONEDRIVE_COMPRESS
    global CLIENT_ID, CLIENT_SECRET, REDIRECT_URL, SESSION_SAVE_PATH, RELOAD_DRIVE
    global DOWNLOAD_SCRATCH_SPACE

    config_path = conf['path']
    settings_dir = get_driver_settings_dir(config_path, 'onedrive')
    DOWNLOAD_SCRATCH_SPACE = os.path.join(settings_dir, ".scratch")
    SESSION_SAVE_PATH = os.path.join(settings_dir, 'onedrive.session')

    # Re-acquire the drive handle if we never connected (or lost state).
    RELOAD_DRIVE = False
    if ONEDRIVE_FOLDER_ID is None or ONEDRIVE_HANDLE is None:
        RELOAD_DRIVE = True

    if os.path.exists(config_path):

        parser = SafeConfigParser()

        try:
            parser.read(config_path)
        except Exception as e:  # 'as' form works on Python 2.6+ and 3.x
            log.exception(e)
            return False

        if parser.has_section('onedrive'):
            for required_section in ['folder', 'application_id', 'application_password']:
                if not parser.has_option('onedrive', required_section):
                    log.error("Config file {}: section 'onedrive' is missing '{}'".format(config_path, required_section))
                    return False

            # A change of target folder invalidates any cached folder id.
            if parser.get('onedrive', 'folder') != ONEDRIVE_FOLDER_NAME:
                RELOAD_DRIVE = True

            ONEDRIVE_FOLDER_NAME = parser.get('onedrive', 'folder')
            CLIENT_ID = parser.get('onedrive', 'application_id')
            CLIENT_SECRET = parser.get('onedrive', 'application_password')

            if parser.has_option('onedrive', 'compress'):
                ONEDRIVE_COMPRESS = (parser.get('onedrive', 'compress').lower() in ['1', 'true', 'yes'])

            if parser.has_option('onedrive', 'redirect_uri'):
                REDIRECT_URL = parser.get('onedrive', 'redirect_uri')
            else:
                REDIRECT_URL = "http://localhost:8080/"

        else:
            # BUG FIX: the '{}' placeholder was never filled in.
            log.error("Config file {}: no 'onedrive' section".format(config_path))
            return False
 def getChampionName(self, championId):
     # Return the champion name for championId, caching the id -> name map in
     # config.ini so later lookups avoid the network.
     # If the cache misses, one API call fetches ALL champions and stores
     # every id/name pair in the config file.
     # Globals: none (reads module-level apiKey)

     # config.ini lives next to this source file; os.path.join keeps the
     # path portable (the old "\config.ini" concatenation was Windows-only).
     configDir = os.path.realpath(os.path.join(os.getcwd(), os.path.dirname(__file__)))
     configFileLocation = os.path.join(configDir, "config.ini")
     config = SafeConfigParser()
     config.read(configFileLocation)

     # Remember the id we were asked about -- the fetch loop below iterates
     # over every champion and must not clobber it.
     requestedId = str(championId)

     # Fast path: answer already cached from a prior API call.
     if config.has_section('champions'):
         if config.has_option('champions', requestedId):
             return config.get('champions', requestedId)
     else:
         # Create the section so the cache writes below succeed.
         config.add_section('champions')
         with open(configFileLocation, 'w') as f:
             config.write(f)
         config.read(configFileLocation)

     # Cache miss: fetch the full champion list and store all the names.
     requestURL = ("https://global.api.pvp.net/api/lol/static-data/na/v1.2/champion?champData=info&api_key=" + apiKey)
     championListResponse = requests.get(requestURL)
     responseMessage = self.checkResponseCode(championListResponse)
     if responseMessage == "ok":
         championData = json.loads(championListResponse.text)["data"]
         for champion in championData:
             # BUG FIX: use fresh names here -- the original overwrote the
             # championId parameter, so the final lookup below returned the
             # LAST champion in the list instead of the requested one.
             champId = championData[champion]["id"]
             champName = championData[champion]["name"]
             config.set('champions', str(champId), champName)
         with open(configFileLocation, 'w') as f:
             config.write(f)
         config.read(configFileLocation)
         if config.has_option('champions', requestedId):
             return config.get('champions', requestedId)
         else:
             print "An error occurred while trying to write the config file, from in getChampionName method"
             return None
     else:
         print responseMessage + ", from getChampionName method"
         return None
Пример #24
0
def loadConfig(filename):
    """Build the effective option dict.

    Starts from the module-level OPTIONS defaults and overrides them with
    any values found in the [twitter] section of filename.  A missing file
    simply yields the defaults.
    """
    merged = dict(OPTIONS)
    if not os.path.exists(filename):
        return merged

    parser = SafeConfigParser()
    parser.read([filename])

    # Each known option is fetched with the accessor matching its type:
    # plain strings via get(), booleans via getboolean().
    for key, fetch in (('format', parser.get),
                       ('prompt', parser.get),
                       ('invert_split', parser.getboolean)):
        if parser.has_option('twitter', key):
            merged[key] = fetch('twitter', key)
    return merged
Пример #25
0
def createThemeFromTemplate(title, description, baseOn='template'):
    """Create a new theme from the given title and description based on
    another theme resource directory.

    Returns the normalized name of the new theme.  Raises KeyError when the
    source theme resource directory does not exist.
    """

    source = queryResourceDirectory(THEME_RESOURCE_NAME, baseOn)
    if source is None:
        raise KeyError("Theme {0:s} not found".format(baseOn))

    # Derive a filesystem/URL-safe directory name from the human title.
    themeName = getUtility(IURLNormalizer).normalize(title)
    if isinstance(themeName, unicode):
        themeName = themeName.encode('utf-8')

    resources = getOrCreatePersistentResourceDirectory()

    resources.makeDirectory(themeName)
    target = resources[themeName]

    # Copy every file of the source theme into the new directory.
    cloneResourceDirectory(source, target)

    manifest = SafeConfigParser()

    # Load the cloned manifest (if any) so its metadata can be rewritten
    # for the new theme.
    if MANIFEST_FILENAME in target:
        fp = target.openFile(MANIFEST_FILENAME)
        try:
            manifest.readfp(fp)
        finally:
            fp.close()

    if not manifest.has_section('theme'):
        manifest.add_section('theme')

    manifest.set('theme', 'title', title)
    manifest.set('theme', 'description', description)

    # Re-point prefix/rules at the new theme's traversal namespace; only
    # options that already existed in the source manifest are rewritten.
    if manifest.has_option('theme', 'prefix'):
        prefix = u"/++%s++%s" % (THEME_RESOURCE_NAME, themeName)
        manifest.set('theme', 'prefix', prefix)

    if manifest.has_option('theme', 'rules'):
        rule = manifest.get('theme', 'rules')
        rule_file_name = rule.split('/')[-1]  # extract real rules file name
        rules = u"/++%s++%s/%s" % (THEME_RESOURCE_NAME, themeName,
                                   rule_file_name)
        manifest.set('theme', 'rules', rules)

    # Serialize the updated manifest back into the new theme directory.
    manifestContents = StringIO()
    manifest.write(manifestContents)
    target.writeFile(MANIFEST_FILENAME, manifestContents)

    return themeName
Пример #26
0
    def read_config(self):
        """Load feed name/url pairs from the config file at self.path.

        Feeds are stored in the [feeds] section as name0/url0, name1/url1,
        ... and appended to self.feeds in index order; reading stops at the
        first missing pair.
        """
        parser = SafeConfigParser()
        # BUG FIX: the py2-only file() builtin leaked the handle when
        # parsing raised; 'with open(...)' always closes it.
        with open(self.path, "r") as rc:
            parser.readfp(rc)

        i = 0
        while (parser.has_option("feeds", "name" + str(i)) and
               parser.has_option("feeds", "url" + str(i))):
            self.feeds.append(
                {"name": parser.get("feeds", "name" + str(i)),
                 "url": parser.get("feeds", "url" + str(i))}
                )
            i += 1
Пример #27
0
def main():
    """Read credentials, parse CLI options, and post a blog entry.

    Credentials (email/password/author) come from the 'blogger.py' section
    of PasswdFile and can be overridden on the command line; all three are
    required.
    """
    email = ''
    password = ''
    author = ''

    # parse password file
    cParser = SafeConfigParser()
    is_exists = cParser.read(PasswdFile)
    if is_exists and cParser.has_section('blogger.py'):
        if cParser.has_option('blogger.py', 'email'):
            email = cParser.get('blogger.py', 'email')
        if cParser.has_option('blogger.py', 'password'):
            password = cParser.get('blogger.py', 'password')
        if cParser.has_option('blogger.py', 'author'):
            author = cParser.get('blogger.py', 'author')

    # parse command-line options
    oParser = OptionParser()

    oParser.add_option("-e", "--email", dest="email", help="E-mail address")
    oParser.add_option("-p", "--password", dest="password", help="Password")
    oParser.add_option("-u", "--author", dest="author", help="Blog author")
    oParser.add_option("-d", "--draft", dest="is_draft", action="store_true",
        help="post draft data")
    oParser.add_option("-f", "--file", dest="blogFile", help="Blog data file")

    # Values from the password file act as defaults for the CLI flags.
    oParser.set_defaults(email=email, password=password, author=author)
    oParser.set_description('follow options can be set by ~/.password.')

    option, args = oParser.parse_args()

    email = option.email
    password = option.password
    author = option.author

    # All three credentials are mandatory.
    if '' in (email, password, author):
        oParser.print_help()
        sys.exit(1)

    blogger = Blogger(email, password)
    if option.blogFile:
        # NOTE(review): file() is Python 2 only and the handle is never
        # closed explicitly -- assumes createBlogData() consumes it fully.
        title, content, label = createBlogData(file(option.blogFile))
    else:
        title, content, label = createBlogData()

    if title and content:
        blogger.createPost(title, content, author, label, option.is_draft)
    else:
        print 'No Blog data, do nothing'
        sys.exit(1)
Пример #28
0
def default_blockstore_opts(config_file=None, testset=False):
    """
    Get our default blockstore opts from a config file or from sane defaults.

    Reads the [blockstore] section of config_file (defaulting to the
    virtualchain config file) and returns a dict with any of:
    tx_broadcaster, utxo_provider, testset, testset_first_block,
    max_subsidy.  Keys whose value is None are omitted.
    """
    if config_file is None:
        config_file = virtualchain.get_config_filename()

    parser = SafeConfigParser()
    parser.read(config_file)

    tx_broadcaster = None
    utxo_provider = None
    testset_first_block = None
    max_subsidy = 0

    if parser.has_section('blockstore'):

        if parser.has_option('blockstore', 'tx_broadcaster'):
            tx_broadcaster = parser.get('blockstore', 'tx_broadcaster')

        if parser.has_option('blockstore', 'utxo_provider'):
            utxo_provider = parser.get('blockstore', 'utxo_provider')

        if parser.has_option('blockstore', 'testset'):
            # BUG FIX: bool('False') is True -- any non-empty string was
            # treated as truthy.  Parse the string explicitly instead.
            testset = parser.get('blockstore', 'testset').strip().lower() in ('1', 'true', 'yes')

        if parser.has_option('blockstore', 'testset_first_block'):
            testset_first_block = int(parser.get('blockstore', 'testset_first_block'))

        if parser.has_option('blockstore', 'max_subsidy'):
            max_subsidy = int(parser.get('blockstore', 'max_subsidy'))

    blockstore_opts = {
        'tx_broadcaster': tx_broadcaster,
        'utxo_provider': utxo_provider,
        'testset': testset,
        'testset_first_block': testset_first_block,
        'max_subsidy': max_subsidy,
    }

    # Strip Nones.  Building a new dict avoids deleting keys from a dict
    # while iterating it (which raises on Python 3).
    return dict((k, v) for (k, v) in blockstore_opts.items() if v is not None)
def get_config( argv, cwd=None ):
    """
    Read sys.argv and parse out repository of migrations and database url.

    >>> from tempfile import mkdtemp
    >>> config_dir = mkdtemp()
    >>> os.makedirs(os.path.join(config_dir, 'config'))
    >>> def write_ini(path, property, value):
    ...     p = SafeConfigParser()
    ...     p.add_section('app:main')
    ...     p.set('app:main', property, value)
    ...     with open(os.path.join(config_dir, 'config', path), 'w') as f: p.write(f)
    >>> write_ini('tool_shed.ini', 'database_connection', 'sqlite:///pg/testdb1')
    >>> config = get_config(['manage_db.py', 'tool_shed'], cwd=config_dir)
    >>> config['repo']
    'lib/galaxy/webapps/tool_shed/model/migrate'
    >>> config['db_url']
    'sqlite:///pg/testdb1'
    >>> write_ini('galaxy.ini', 'database_file', 'moo.sqlite')
    >>> config = get_config(['manage_db.py'], cwd=config_dir)
    >>> config['db_url']
    'sqlite:///moo.sqlite?isolation_level=IMMEDIATE'
    >>> config['repo']
    'lib/galaxy/model/migrate'
    """
    # The database name may be passed as the last CLI argument.
    if argv and (argv[-1] in DATABASE):
        database = argv.pop()  # database name tool_shed, galaxy, or install.
    else:
        database = 'galaxy'
    database_defaults = DATABASE[ database ]

    config_file = read_config_file_arg( argv, database_defaults.get( 'config_file', DEFAULT_CONFIG_FILE ), database_defaults.get( 'old_config_file' ) )
    repo = database_defaults[ 'repo' ]
    config_prefix = database_defaults.get( 'config_prefix', DEFAULT_CONFIG_PREFIX )
    default_sqlite_file = database_defaults[ 'default_sqlite_file' ]
    if cwd:
        config_file = os.path.join( cwd, config_file )

    cp = SafeConfigParser()
    cp.read( config_file )

    # Prefer an explicit connection URL, then a sqlite file option, then
    # the per-database default sqlite file.
    if cp.has_option( "app:main", "%sdatabase_connection" % config_prefix):
        db_url = cp.get( "app:main", "%sdatabase_connection" % config_prefix )
    elif cp.has_option( "app:main", "%sdatabase_file" % config_prefix ):
        # BUG FIX: the prefixed option is checked above, so read the SAME
        # prefixed option here (previously read unprefixed 'database_file',
        # which fails whenever config_prefix is non-empty).
        db_url = "sqlite:///%s?isolation_level=IMMEDIATE" % cp.get( "app:main", "%sdatabase_file" % config_prefix )
    else:
        db_url = "sqlite:///%s?isolation_level=IMMEDIATE" % default_sqlite_file

    require_dialect_egg( db_url )
    return dict(db_url=db_url, repo=repo, config_file=config_file, database=database)
Пример #30
0
class ConfigStore(object):
    """Persistent application settings backed by an INI file.

    Writes are debounced through a 3-second single-shot timer so a burst of
    set() calls results in a single save.
    """

    def __init__(self):
        self.config = SafeConfigParser()
        # Prefer the per-user config directory; older appdirs releases only
        # provide user_data_dir.
        if hasattr(appdirs, 'user_config_dir'):
            data_dir = appdirs.user_config_dir('Photini')
        else:
            data_dir = appdirs.user_data_dir('Photini')
        if not os.path.isdir(data_dir):
            # 0o700 (not the py2-only literal 0700): private to the user.
            os.makedirs(data_dir, mode=0o700)
        self.file_name = os.path.join(data_dir, 'photini.ini')
        # Migrate a legacy ~/photini.ini into the new location.
        old_file_name = os.path.expanduser('~/photini.ini')
        if os.path.exists(old_file_name):
            self.config.read(old_file_name)
            self.save()
            os.unlink(old_file_name)
        self.config.read(self.file_name)
        # Debounce timer: save() fires 3 s after the last change.
        self.timer = QtCore.QTimer()
        self.timer.setSingleShot(True)
        self.timer.setInterval(3000)
        self.timer.timeout.connect(self.save)
        self.has_section = self.config.has_section

    def get(self, section, option, default=None):
        """Return the stored value, or store and return default."""
        if self.config.has_option(section, option):
            return self.config.get(section, option)
        if default is not None:
            self.set(section, option, default)
        return default

    def set(self, section, option, value):
        """Store value and schedule a save (no-op if unchanged)."""
        if not self.config.has_section(section):
            self.config.add_section(section)
        if (self.config.has_option(section, option) and
                self.config.get(section, option) == value):
            return
        self.config.set(section, option, value)
        self.timer.start()

    def remove_section(self, section):
        """Delete a whole section and schedule a save."""
        if not self.config.has_section(section):
            return
        for option in self.config.options(section):
            self.config.remove_option(section, option)
        self.config.remove_section(section)
        self.timer.start()

    def save(self):
        # BUG FIX: the original passed open(...) directly to write() and
        # never closed the handle; 'with' guarantees it is closed.
        with open(self.file_name, 'w') as cfg_file:
            self.config.write(cfg_file)
        # Config may hold credentials: owner read/write only.
        os.chmod(self.file_name, 0o600)
Пример #31
0
def configure(argv,
              hasRPMSupport=False,
              hasSmartSupport=False,
              hasZypperSupport=False):
    """Build the option parser, read config files, and parse argv.

    Options are parsed twice: once to find the config file locations, then
    again after the config files have been loaded as OptionParser defaults.
    The has*Support flags only tack ' (N/A)' onto the help text of the
    corresponding integration options.
    """
    optionParser = OptionParser(
        usage="%prog [options] <searchTerm>",
        version="%%prog %s" % VERSION,
        description=
        "A command-line client for the openSUSE Package Search web service.")
    # --- configuration-file control options ---
    optionParser.add_option('',
                            '--config',
                            action='store',
                            type='string',
                            dest='configFile',
                            default=defaultUserConfigFile,
                            help="user configuration file (defaults to %s)" %
                            defaultUserConfigFile,
                            metavar="FILE")
    optionParser.add_option(
        '',
        '--skip-global-config',
        action='store_false',
        dest='readGlobalConfig',
        default=True,
        help="skip reading the global configuration file %s" %
        globalConfigFile)
    optionParser.add_option(
        '',
        '--skip-config',
        action='store_false',
        dest='readConfig',
        default=True,
        help="skip reading configuration files alltogether")
    # --- search mode options (mutually exclusive consts on 'mode') ---
    optionParser.add_option('-n',
                            '--by-name',
                            action='store_const',
                            const='ByName',
                            dest='mode',
                            help="only search for matches in package names")
    optionParser.add_option('-c',
                            '--by-content',
                            action='store_const',
                            const='ByContents',
                            dest='mode',
                            help="also search for matches in all file names")
    optionParser.add_option(
        '-s',
        '--simple',
        action='store_const',
        const='Simple',
        dest='mode',
        help=
        "search for matches in package names, package summaries and first match in file names (default)"
    )
    # --- distribution version selection ---
    optionParser.add_option(
        '-d',
        '--dist',
        type='string',
        dest='version',
        default=None,
        help=
        "openSUSE version to search for (defaults to %s, may specify 'factory' for Factory or 'latest' for latest release)"
        % defaultSuseVersion,
        metavar="VERSION")
    optionParser.add_option(
        '-l',
        '--latest',
        action='store_const',
        const=latestSuseVersion,
        dest='version',
        help="search in the latest released openSUSE version (%s)" %
        latestSuseVersion)
    optionParser.add_option(
        '-F',
        '--factory',
        action='store_const',
        const='factory',
        dest='version',
        help="search in the openSUSE development version (Factory)")
    # --- output options ---
    optionParser.add_option(
        '-u',
        '--url',
        action='store_true',
        dest='showURL',
        default=False,
        help=
        "also show the URLs of the repositories that contain matching packages"
    )
    optionParser.add_option(
        '-a',
        '--arch',
        action='store_true',
        dest='showArch',
        default=False,
        help=
        "also show the architectures each package match is available for (defaults to false)"
    )
    # disabled for now, will need to add RPM release information in web service results first:
    #optionParser.add_option('-f', '--file', action='store_true', dest='showFileURL', default=False,
    #		help="also show the fully qualified RPM file URLs")
    optionParser.add_option(
        '-t',
        '--timeout',
        action='store',
        type='int',
        dest='timeout',
        default=defaultTimeout,
        help="timeout in seconds for the web service request",
        metavar="TIMEOUT")
    optionParser.add_option(
        '-q',
        '--quiet',
        action='store_false',
        dest='verbose',
        default=True,
        help="don't display progress information (for dumb terminals)")
    optionParser.add_option(
        '-A',
        '--no-ansi',
        action='store_false',
        dest='color',
        default=True,
        help="don't use ANSI escape sequences (for dumb terminals), implies -q"
    )
    optionParser.add_option(
        '',
        '--theme',
        action='store',
        type='string',
        dest='colorScheme',
        default=None,
        help="color scheme to use (unless -A/--no-ansi) -- valid values: %s" %
        (', '.join(colorSchemeMap.keys())),
        metavar='NAME')
    # --- debugging options ---
    optionParser.add_option(
        '-D',
        '--dump',
        action='store_true',
        dest='dump',
        default=False,
        help="simply dump the XML tree sent back by the server")
    optionParser.add_option('-U',
                            '--show-url',
                            action='store_true',
                            dest='showQueryURL',
                            default=False,
                            help="show the web service query URL")
    # --- network options ---
    optionParser.add_option(
        '',
        '--proxy',
        action='store',
        type='string',
        dest='proxy',
        default=defaultHttpProxy,
        help=
        "HTTP proxy server to use for performing the request (if not specified, uses the http_proxy environment variable)",
        metavar="SERVER:PORT")
    optionParser.add_option('',
                            '--proxy-auth',
                            action='store',
                            type='string',
                            dest='proxyAuth',
                            default=None,
                            help="HTTP proxy authentication",
                            metavar="USER:PASSWORD")
    optionParser.add_option(
        '',
        '--stack-trace',
        action='store_true',
        dest='showStackTrace',
        default=False,
        help=
        "show stack traces on exceptions (only useful for submitting bug reports)"
    )

    # --- optional integrations: RPM, smart, zypper ---
    helpAddonForRPM = ''
    if not hasRPMSupport:
        helpAddonForRPM = ' (N/A)'
        pass

    optionParser.add_option(
        '-r',
        '--rpm',
        action='store_true',
        dest='rpm',
        default=False,
        help="compare package matches with your current RPM database" +
        helpAddonForRPM)
    optionParser.add_option(
        '',
        '--rpm-root',
        action='store',
        type='string',
        dest='rpmRoot',
        default=None,
        help=
        "set the root directory for the RPM database (not the path to the RPM database but the root of the system)"
        + helpAddonForRPM,
        metavar="DIRECTORY")

    helpAddonForSmart = ''
    if not hasSmartSupport:
        helpAddonForSmart = ' (N/A)'
        pass

    optionParser.add_option('',
                            '--smart',
                            action='store_true',
                            dest='smart',
                            default=False,
                            help="enable smart support to check repositories" +
                            helpAddonForSmart)
    #optionParser.add_option('', '--smart-add', action='store_true', dest='smartAdd', default=False,
    #		help="prompt for adding repositories to smart" + helpAddonForSmart)

    helpAddonForZypper = ''
    if not hasZypperSupport:
        helpAddonForZypper = ' (N/A)'

    optionParser.add_option(
        '',
        '--zypper',
        action='store_true',
        dest='zypper',
        default=False,
        help="enable zypper support to check repositories" +
        helpAddonForZypper)

    # First pass: needed to learn the config-file related options.
    (options, args) = optionParser.parse_args(argv)

    if options.readConfig:
        # Fall back gracefully on very old ConfigParser modules.
        try:
            from ConfigParser import SafeConfigParser
        except ImportError:
            from ConfigParser import ConfigParser
            pass
        try:
            configParser = SafeConfigParser()
        except NameError:
            configParser = ConfigParser()
            pass

        # NOTE(review): the -c/--by-content option uses const='ByContents',
        # but this map produces 'ByContent' -- confirm which spelling the
        # web service actually expects.
        configModeMap = {
            'simple': 'Simple',
            'name': 'ByName',
            'content': 'ByContent'
        }

        userConfigFile = os.path.expanduser(options.configFile)
        configFiles = []
        if options.readGlobalConfig:
            configFiles.append(globalConfigFile)
            pass
        configFiles.append(userConfigFile)

        try:
            configParser.read(configFiles)
        except Exception, e:
            print >> sys.stderr, "Error while reading configuration from %s: %s" % (
                " and ".join(configFiles), e)
            if options.showStackTrace:
                import traceback
                traceback.print_exc()
                pass
            sys.exit(E_CONFIG)
            pass

        # set configuration values as defaults in OptionParser:
        def setOption(type, section, name, option=None):
            # 'type' selects the accessor: '' -> get, 'boolean' ->
            # getboolean, 'int' -> getint.
            if not option:
                option = name
                pass
            if configParser.has_option(section, name):
                m = getattr(configParser, 'get%s' % type)
                optionParser.set_default(option, m(section, name))
                return True
            return False

        if configParser.has_option('General', 'mode'):
            modeConfig = configParser.get('General', 'mode')
            if configModeMap.has_key(modeConfig):
                optionParser.set_default('mode', configModeMap[modeConfig])
            else:
                print >> sys.stderr, 'ERROR: invalid configuration value for parameter "mode" in section "General": %s' % modeConfig
                print >> sys.stderr, 'Valid values are: %s' % ', '.join(
                    configModeMap.keys())
                sys.exit(E_CONFIG)
                pass
            pass
        setOption('', 'General', 'distribution', 'version')
        setOption('boolean', 'Output', 'color')
        setOption('', 'Output', 'theme', 'colorScheme')
        setOption('boolean', 'Output', 'url', 'showURL')
        setOption('boolean', 'Output', 'arch', 'showArch')
        setOption('boolean', 'Output', 'verbose')
        setOption('boolean', 'Output', 'show_query_url', 'showQueryURL')
        if hasRPMSupport:
            setOption('boolean', 'RPM', 'rpm')
            setOption('', 'RPM', 'root', 'rpmRoot')
            pass
        if hasSmartSupport:
            setOption('boolean', 'Smart', 'smart')
            setOption('boolean', 'Smart', 'prompt')
            pass
        setOption('int', 'Network', 'timeout')
        setOption('', 'Network', 'proxy')
        setOption('', 'Network', 'proxy_auth', 'proxyAuth')

        # run option parsing again, now with defaults from the configuration files
        # NOTE(review): this re-parse uses sys.argv, not the argv parameter
        # passed to configure() -- looks unintentional; confirm.
        (options, args) = optionParser.parse_args(sys.argv)
        pass
Пример #32
0
class GlobalSettings(Signallable):
    """
    Global PiTiVi settings.

    The settings object loads settings from three different sources: the
    global configuration, the local configuration file, and the environment.
    Modules declare which settings they wish to access by calling the
    addConfigOption() class method during initialization.

    @cvar options: A dictionnary of available settings.
    @cvar environment: A list of the controlled environment variables.
    """

    options = {}
    environment = set()
    defaults = {}
    __signals__ = {}

    def __init__(self, **kwargs):
        Signallable.__init__(self)
        self._config = SafeConfigParser()
        self._readSettingsFromGlobalConfiguration()
        self._readSettingsFromConfigurationFile()
        self._readSettingsFromEnvironmentVariables()

    def _readSettingsFromGlobalConfiguration(self):
        # ideally, this should read settings from GConf for ex
        pass

    def _readSettingsFromConfigurationFile(self):
        """Read the user's pitivi.conf, coercing each declared option to
        its declared type before assigning it as an attribute."""
        try:
            pitivi_path = self.get_local_settings_path()
            pitivi_conf_file_path = os.path.join(pitivi_path, "pitivi.conf")
            self._config.read(pitivi_conf_file_path)
        except ParsingError:
            return

        for (section, attrname, typ, key, env, value) in self.iterAllOptions():
            if not self._config.has_section(section):
                continue
            if key and self._config.has_option(section, key):
                if typ == int or typ == long:
                    # WARNING/FIXME : This try/except is for a small cockup in previous
                    # configurations where we stored a float value... but declared it
                    # as an integer.
                    try:
                        value = self._config.getint(section, key)
                    except ValueError:
                        value = int(self._config.getfloat(section, key))
                elif typ == float:
                    value = self._config.getfloat(section, key)
                elif typ == bool:
                    value = self._config.getboolean(section, key)
                else:
                    value = self._config.get(section, key)
                setattr(self, attrname, value)

    def _readSettingsFromEnvironmentVariables(self):
        # NOTE(review): when the environment variable is set, the declared
        # default 'value' is applied rather than the variable's content
        # ('var') -- confirm this is intended.
        for (section, attrname, typ, key, env, value) in self.iterAllOptions():
            var = get_env_by_type(typ, env)
            if var is not None:
                setattr(self, attrname, value)

    def _writeSettingsToConfigurationFile(self):
        """Persist every declared option back to the user's pitivi.conf."""
        pitivi_path = self.get_local_settings_path()
        pitivi_conf_file_path = os.path.join(pitivi_path, "pitivi.conf")

        for (section, attrname, typ, key, env_var,
             value) in self.iterAllOptions():
            if not self._config.has_section(section):
                self._config.add_section(section)
            if key:
                if value is not None:
                    self._config.set(section, key, str(value))
                else:
                    self._config.remove_option(section, key)
        try:
            conf_file = open(pitivi_conf_file_path, 'w')
        except (IOError, OSError):
            # BUG FIX: 'except IOError, OSError:' caught only IOError and
            # REBOUND the name OSError to the exception instance.
            return
        # 'with' guarantees the handle is closed even if write() raises
        # (the original also shadowed the builtin 'file').
        with conf_file:
            self._config.write(conf_file)
Пример #33
0
from nltk import data
import zipimport
import os

# Separator used for multi-pattern values in the config file.
REGEX_SEPARATOR = '|||'

config = SafeConfigParser()
config.read("masterchef.ini")

# Fallback patterns used when masterchef.ini does not override them.
URL_REGEX = 'http://[^\s]+'
EMAIL_REGEX = '^[A-Za-z0-9\.\+_-]+@[A-Za-z0-9\._-]+\.[a-zA-Z]*$'
HASHTAG_REGEX = '(\A|\s)#(\w+)'
MENTION_REGEX = '(\A|\s)@(\w+)'

# Loading cooking section
# NOTE(review): the '(has_option and get) or DEFAULT' idiom also falls back
# to the default when the configured value is empty/falsy, and the defaults
# for delete/keywords/oficial_users/languages are plain strings rather than
# lists -- confirm that is intended.
url_regex = (config.has_option('cooking', 'url_regex')
             and config.get('cooking', 'url_regex')) or URL_REGEX
email_regex = (config.has_option('cooking', 'email_regex')
               and config.get('cooking', 'email_regex')) or EMAIL_REGEX
hashtag_regex = (config.has_option('cooking', 'hashtag_regex')
                 and config.get('cooking', 'hashtag_regex')) or HASHTAG_REGEX
mention_regex = (config.has_option('cooking', 'mention_regex')
                 and config.get('cooking', 'mention_regex')) or MENTION_REGEX
delete = (config.has_option('cooking', 'delete')
          and config.get('cooking', 'delete').split(REGEX_SEPARATOR)) or ''
keywords = (config.has_option('cooking', 'keywords') and config.get(
    'cooking', 'keywords').lower().split(',')) or 'movistar'
oficial_users = (config.has_option('cooking', 'oficial_users') and config.get(
    'cooking', 'oficial_users').lower().split(',')) or 'movistar'
languages = (config.has_option('cooking', 'languages') and config.get(
    'cooking', 'languages').lower().split(',')) or 'spanish'
Пример #34
0
    "['new_dependency1', 'new_dependency2']}\"")
(options, args) = parser.parse_args()

# Require --config and --migrations, and verify that every requested
# migration is one of the known ones.
if not options.config or not options.migrations\
        or not reduce(lambda a, b: a and (b in migrations),
                      options.migrations.split(','),
                      True):
    parser.print_help()
    sys.exit()

config.read(options.config)

# Collect database connection parameters from the [options] section
# (db_host, db_port, db_user, db_password).
conn_parms = {}
for parm in ('host', 'port', 'user', 'password'):
    db_parm = 'db_' + parm
    if config.has_option('options', db_parm):
        conn_parms[parm] = config.get('options', db_parm)

if 'user' not in conn_parms:
    print 'No user found in configuration'
    sys.exit()

# Database name: the CLI flag wins over the config file.
db_name = options.database or config.get('options', 'db_name')

# Reject empty/placeholder database names (e.g. the literal 'False').
if not db_name or db_name == '' or db_name.isspace()\
        or db_name.lower() == 'false':
    parser.print_help()
    sys.exit()

conn_parms['database'] = db_name
Пример #35
0
def main(args=sys.argv):
    opt_parser = optparse.OptionParser(
        usage="usage: %prog [-c CONFIG] [-b BOT]",
        description="Manage bot playing multiple postal games.")
    opt_parser.add_option('-c', '--config',
                          default="gameroom.cfg",
                          help="Configuration file to use.")
    opt_parser.add_option('-b', '--bot',
                          help="Bot section to use as the default.")
    options, args = opt_parser.parse_args(args)
    if len(args) > 1:
        print "Unrecognized command line arguments", args[1:]
        return 1
    config = SafeConfigParser()
    try:
        config.readfp(open(options.config, 'rU'))
    except IOError:
        print "Could not open '%s'." % (options.config, )
        return 1

    try:
        log_dir = config.get("postal", "log_dir")
    except ConfigParser.Error:
        try:
            log_dir = config.get("Logging", "directory")
        except ConfigParser.Error:
            log_dir = "."
    if not os.path.exists(log_dir):
        print "Log directory '%s' not found, attempting to create it." % (
            log_dir
        )
        os.makedirs(log_dir)

    try:
        log_filename = config.get("postal", "log_file")
    except ConfigParser.Error:
        log_filename = "postal-" + time.strftime("%Y-%m") + ".log"
    log_path = os.path.join(log_dir, log_filename)
    logfmt = logging.Formatter(fmt="%(asctime)s %(levelname)s: %(message)s",
                               datefmt="%Y-%m-%d %H:%M:%S")
    loghandler = logging.FileHandler(log_path)
    loghandler.setFormatter(logfmt)
    log.addHandler(loghandler)
    consolehandler = logging.StreamHandler()
    consolehandler.setFormatter(logfmt)
    log.addHandler(consolehandler)
    log.propagate = False
    gameroom.init_logging(config)

    gameroom_url = config.get("global", "gameroom_url")
    if options.bot:
        bot_section = options.bot
    else:
        bot_section = config.get("global", "default_engine")
    try:
        bot_username = config.get(bot_section, "username")
        bot_password = config.get(bot_section, "password")
    except ConfigParser.Error:
        try:
            bot_username = config.get("global", "username")
            bot_password = config.get("global", "password")
        except NoOptionError:
            log.error("Could not find username/password in config.")
            return 1

    while True:
        try:
            open("stop_postal", 'r')
            log.info("Exiting after finding stop file")
            sys.exit()
        except IOError:
            pass
        gr_con = gameroom.GameRoom(gameroom_url)
        gr_con.login(bot_username, bot_password)
        games = gr_con.mygames()
        gr_con.logout()
        total_games = len(games)
        games = [g for g in games if g['postal'] == '1']
        postal_games = len(games)
        games = [g for g in games if g['turn'] == g['side']]
        my_turn_games = len(games)
        log.info("Found %d games with %d postal games and %d on my turn." %
                 (total_games, postal_games, my_turn_games))
        if games:
            games.sort(key=lambda x: x['turnts'])
            for game_num, game in enumerate(games):
                try:
                    open("stop_postal", 'r')
                    log.info("Exiting after finding stop file")
                    sys.exit()
                except IOError:
                    pass
                log.info("%d/%d: Playing move against %s game #%s" %
                         (game_num + 1, my_turn_games, game['player'],
                          game['gid']))
                game_args = ["gameroom", "move", game['gid'], game['side']]
                if config.has_option("postal", game['gid']):
                    section = config.get("postal", game['gid'])
                    game_args += ["-b", section]
                    log.info("Using section %s for use with gid #%s" %
                             (section, game['gid']))
                elif config.has_option("postal", game['player']):
                    section = config.get("postal", game['player'])
                    game_args += ["-b", section]
                    log.info("Using section %s for use against %s" %
                             (section, game['player']))
                gmoptions = gameroom.parseargs(game_args)
                res = gameroom.run_game(gmoptions, config)
                if res is not None and res != 0:
                    log.warning("Error result from gameroom run %d." % (res, ))
        else:
            log.info("No postal games with a turn found, sleeping.")
            time.sleep(300)
Пример #36
0
import db, transaction, random, logging, os, sys, time, traceback
import time, popen2, re, copy, thread, fcntl, socket
from logging import debug, error
from ZODB.FileStorage import FileStorage
from ZODB import DB, POSException
from ConfigParser import SafeConfigParser

# Verbose root logger; keep ZEO's own chatter down to INFO.
logging.getLogger().setLevel(logging.DEBUG)
logging.getLogger('ZEO').setLevel(logging.INFO)

# Cluster settings come from the user's ~/.ev ini file.
config = SafeConfigParser()
config.read(os.path.expanduser('~/.ev'))
donedir = config.get('cluster', 'donedir')  # must be absolute path, not ~/...
if donedir[-1] != '/':
    donedir += '/'
zeofile = None
if config.has_option('cluster', 'zeofile'):
    zeofile = os.path.expanduser(config.get('cluster', 'zeofile'))
# Scratch directory: $TMPDIR (default /tmp/), overridable by [cluster]
# tmpdir, with a per-host subdirectory so nodes sharing a network
# filesystem do not collide.
tmp = os.environ.get('TMPDIR')
if not tmp:
    tmp = '/tmp/'
if config.has_option('cluster', 'tmpdir'):
    tmp = config.get('cluster', 'tmpdir')
if tmp[-1] != '/':
    tmp += '/'
tmp += socket.gethostname() + '/'
try:
    if not os.path.exists(tmp):
        os.mkdir(tmp)
except OSError:
    # BUG FIX: was a bare except, which also swallowed SystemExit and
    # KeyboardInterrupt.  Only filesystem errors (e.g. a concurrent mkdir
    # by another process on this host) are expected and safe to ignore.
    pass
# NOTE(review): presumably matches work-unit file names ('p###'/'b###')
# handled later in this script -- confirm.
pat = r'[pb]\d\d\d$'
Пример #37
0
class Config(object):
    """dist_test configuration loaded from an ini file with env overrides.

    Every *_CONFIG class constant is a (section, option, env_var) triple;
    when the environment variable is set it takes precedence over the
    config-file value (see _get_with_env_override).
    """

    # S3 settings
    AWS_ACCESS_KEY_CONFIG = ('aws', 'access_key', 'AWS_ACCESS_KEY')
    AWS_SECRET_KEY_CONFIG = ('aws', 'secret_key', 'AWS_SECRET_KEY')
    AWS_TEST_RESULT_BUCKET_CONFIG = ('aws', 'test_result_bucket',
                                     'TEST_RESULT_BUCKET')

    # MySQL settings
    MYSQL_HOST_CONFIG = ('mysql', 'host', 'MYSQL_HOST')
    MYSQL_PORT_CONFIG = ('mysql', 'port', 'MYSQL_PORT')
    MYSQL_USER_CONFIG = ('mysql', 'user', 'MYSQL_USER')
    MYSQL_PWD_CONFIG = ('mysql', 'password', 'MYSQL_PWD')
    MYSQL_DB_CONFIG = ('mysql', 'database', 'MYSQL_DB')

    # Isolate settings
    ISOLATE_HOME_CONFIG = ('isolate', 'home', "ISOLATE_HOME")
    ISOLATE_SERVER_CONFIG = ('isolate', 'server', "ISOLATE_SERVER")
    ISOLATE_CACHE_DIR_CONFIG = ('isolate', 'cache_dir', "ISOLATE_CACHE_DIR")

    # Beanstalk settings
    BEANSTALK_HOST_CONFIG = ('beanstalk', 'host', 'BEANSTALK_HOST')

    # Dist test settings
    DIST_TEST_MASTER_CONFIG = ('dist_test', 'master', "DIST_TEST_MASTER")
    DIST_TEST_JOB_PATH_CONFIG = ('dist_test', 'job_path', 'DIST_TEST_JOB_PATH')
    DIST_TEST_USER_CONFIG = ('dist_test', 'user', 'DIST_TEST_USER')
    DIST_TEST_PASSWORD_CONFIG = ('dist_test', 'password', 'DIST_TEST_PASSWORD')
    DIST_TEST_RESULT_SERVER_CONFIG = ('dist_test', 'result_server',
                                      'DIST_TEST_RESULT_SERVER')
    DIST_TEST_TEMP_DIR_CONFIG = ('dist_test', 'temp_dir', 'DIST_TEST_TEMP_DIR')

    def __init__(self, path=None):
        """Read configuration and resolve every known setting.

        path -- explicit config file path; falls back to $DIST_TEST_CNF,
        then ~/.dist_test.cnf.  Missing files leave only the defaults and
        environment overrides in effect (SafeConfigParser.read ignores
        unreadable files).
        """
        if path is None:
            path = os.getenv("DIST_TEST_CNF")
        if path is None:
            path = os.path.join(os.getenv("HOME"), ".dist_test.cnf")
        logging.info("Reading configuration from %s", path)
        # Populate parser with default values
        defaults = {
            "log_dir":
            os.path.join(os.path.dirname(os.path.realpath(__file__)), "logs"),
            "submit_gce_metrics":
            "True",
            "allowed_ip_ranges":
            "0.0.0.0/0",
            "accounts":
            "{}",
        }
        self.config = SafeConfigParser(defaults)
        self.config.read(path)

        # Isolate settings
        self.ISOLATE_HOME = self._get_with_env_override(
            *self.ISOLATE_HOME_CONFIG)
        self.ISOLATE_SERVER = self._get_with_env_override(
            *self.ISOLATE_SERVER_CONFIG)
        self.ISOLATE_CACHE_DIR = self._get_with_env_override(
            *self.ISOLATE_CACHE_DIR_CONFIG)

        # S3 settings
        self.AWS_ACCESS_KEY = self._get_with_env_override(
            *self.AWS_ACCESS_KEY_CONFIG)
        self.AWS_SECRET_KEY = self._get_with_env_override(
            *self.AWS_SECRET_KEY_CONFIG)
        self.AWS_TEST_RESULT_BUCKET = self._get_with_env_override(
            *self.AWS_TEST_RESULT_BUCKET_CONFIG)

        # MySQL settings
        self.MYSQL_HOST = self._get_with_env_override(*self.MYSQL_HOST_CONFIG)
        try:
            self.MYSQL_PORT = int(
                self._get_with_env_override(*self.MYSQL_PORT_CONFIG))
        except (TypeError, ValueError):
            # BUG FIX: was a bare except.  An unset port (None -> TypeError)
            # or a non-numeric one (ValueError) falls back to MySQL's
            # default; anything else should propagate.
            self.MYSQL_PORT = 3306
        self.MYSQL_USER = self._get_with_env_override(*self.MYSQL_USER_CONFIG)
        self.MYSQL_PWD = self._get_with_env_override(*self.MYSQL_PWD_CONFIG)
        self.MYSQL_DB = self._get_with_env_override(*self.MYSQL_DB_CONFIG)

        # Beanstalk settings
        self.BEANSTALK_HOST = self._get_with_env_override(
            *self.BEANSTALK_HOST_CONFIG)

        # dist_test settings; the section must exist because the master
        # configs below are read from it unconditionally.
        if not self.config.has_section('dist_test'):
            self.config.add_section('dist_test')
        self.DIST_TEST_MASTER = self._get_with_env_override(
            *self.DIST_TEST_MASTER_CONFIG)
        self.DIST_TEST_JOB_PATH = self._get_with_env_override(
            *self.DIST_TEST_JOB_PATH_CONFIG)
        if self.DIST_TEST_JOB_PATH is None:
            self.DIST_TEST_JOB_PATH = os.path.expanduser(
                "~/.dist-test-last-job")
        self.DIST_TEST_USER = self._get_with_env_override(
            *self.DIST_TEST_USER_CONFIG)
        self.DIST_TEST_PASSWORD = self._get_with_env_override(
            *self.DIST_TEST_PASSWORD_CONFIG)

        # dist_test master configs (in the 'dist_test' section)
        self.DIST_TEST_ALLOWED_IP_RANGES = self.config.get(
            'dist_test', 'allowed_ip_ranges')
        self.ACCOUNTS = self.config.get('dist_test', 'accounts')

        self.log_dir = self.config.get('dist_test', 'log_dir')
        # Make the log directory if it doesn't exist
        Config.mkdir_p(self.log_dir)
        # dist_test result server settings
        self.DIST_TEST_RESULT_SERVER = self._get_with_env_override(
            *self.DIST_TEST_RESULT_SERVER_CONFIG)
        self.DIST_TEST_TEMP_DIR = self._get_with_env_override(
            *self.DIST_TEST_TEMP_DIR_CONFIG)

        self.SERVER_ACCESS_LOG = os.path.join(self.log_dir,
                                              "server-access.log")
        self.SERVER_ERROR_LOG = os.path.join(self.log_dir, "server-error.log")
        self.SERVER_LOG = os.path.join(self.log_dir, "server.log")
        self.SLAVE_LOG = os.path.join(self.log_dir, "slave.log")

    @staticmethod
    def mkdir_p(path):
        """Similar to mkdir -p, make a directory ignoring EEXIST"""
        try:
            os.makedirs(path)
        except OSError as exc:
            if exc.errno == errno.EEXIST and os.path.isdir(path):
                pass
            else:
                raise

    def _get_with_env_override(self, section, option, env_key):
        """Return os.environ[env_key] if set, else the file value, else None."""
        env_value = os.environ.get(env_key)
        if env_value is not None:
            return env_value
        file_value = None
        if self.config.has_option(section, option):
            file_value = self.config.get(section, option)
        return file_value

    def ensure_aws_configured(self):
        """Raise unless all S3 settings are resolvable."""
        self._ensure_configs([
            self.AWS_ACCESS_KEY_CONFIG, self.AWS_SECRET_KEY_CONFIG,
            self.AWS_TEST_RESULT_BUCKET_CONFIG
        ])

    def ensure_isolate_configured(self):
        """Raise unless all Isolate settings are resolvable."""
        self._ensure_configs([
            self.ISOLATE_HOME_CONFIG, self.ISOLATE_SERVER_CONFIG,
            self.ISOLATE_CACHE_DIR_CONFIG
        ])

    def ensure_mysql_configured(self):
        """Raise unless all required MySQL settings are resolvable."""
        self._ensure_configs([
            self.MYSQL_HOST_CONFIG, self.MYSQL_USER_CONFIG,
            self.MYSQL_PWD_CONFIG, self.MYSQL_DB_CONFIG
        ])

    def ensure_beanstalk_configured(self):
        """Raise unless the beanstalk host is resolvable."""
        self._ensure_configs([self.BEANSTALK_HOST_CONFIG])

    def ensure_dist_test_configured(self):
        """Raise unless the dist_test master is resolvable."""
        self._ensure_configs([self.DIST_TEST_MASTER_CONFIG])

    def ensure_result_server_configured(self):
        """Raise unless the result server is resolvable."""
        self._ensure_configs([self.DIST_TEST_RESULT_SERVER_CONFIG])

    def _ensure_configs(self, configs):
        """Raise Exception naming the first (section, option, env) triple
        that resolves to None."""
        for config in configs:
            if self._get_with_env_override(*config) is None:
                raise Exception((
                    "Missing configuration %s.%s. Please set in the config file or "
                    + "set the environment variable %s.") % config)

    def configure_auth(self):
        """Configure urllib2 to pass authentication information if provided
        in the configuration.  No-op when DIST_TEST_USER is unset."""
        if not self.DIST_TEST_USER:
            return
        password_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
        password_mgr.add_password(None, self.DIST_TEST_MASTER,
                                  self.DIST_TEST_USER, self.DIST_TEST_PASSWORD)
        handler = urllib2.HTTPDigestAuthHandler(password_mgr)
        opener = urllib2.build_opener(handler)
        urllib2.install_opener(opener)
Пример #38
0
        if "--version" in sys.argv:
            print "Versión: ", __version__

        if len(sys.argv)>1 and sys.argv[1].endswith(".ini"):
            CONFIG_FILE = sys.argv[1]
            print "Usando configuracion:", CONFIG_FILE
         
        config = SafeConfigParser()
        config.read(CONFIG_FILE)
        CERT = config.get('WSAA','CERT')
        PRIVATEKEY = config.get('WSAA','PRIVATEKEY')
        CUIT = config.get('WSLTV','CUIT')
        ENTRADA = config.get('WSLTV','ENTRADA')
        SALIDA = config.get('WSLTV','SALIDA')
        
        if config.has_option('WSAA','URL') and not H**O:
            WSAA_URL = config.get('WSAA','URL')
        else:
            WSAA_URL = None #wsaa.WSAAURL
        if config.has_option('WSLTV','URL') and not H**O:
            WSLTV_URL = config.get('WSLTV','URL')
        else:
            WSLTV_URL = WSDL

        PROXY = config.has_option('WSAA', 'PROXY') and config.get('WSAA', 'PROXY') or None
        CACERT = config.has_option('WSAA', 'CACERT') and config.get('WSAA', 'CACERT') or None
        WRAPPER = config.has_option('WSAA', 'WRAPPER') and config.get('WSAA', 'WRAPPER') or None
        
        if config.has_section('DBF'):
            conf_dbf = dict(config.items('DBF'))
            if DEBUG: print "conf_dbf", conf_dbf
Пример #39
0
def process_options(options, gw_data_source_opts, pipeline, mainloop):
    # Locate and load the initialization file
    if not options.infile:
        print >> sys.stderr, "Initialization file required."
    elif not os.path.exists(options.infile):
        print >> sys.stderr, "Initialization file path is invalid."
        sys.exit(-1)

    cfg = SafeConfigParser()
    cfg.read(options.infile)

    #
    # This supplants the ligo_data_find step and is mostly convenience
    #
    # TODO: Move to a utility library

    if gw_data_source_opts.data_source == "frames" and gw_data_source_opts.frame_cache is None:
        if gw_data_source_opts.seg is None:
            sys.exit(
                "No frame cache present, and no GPS times set. Cannot query for data without an interval to query in."
            )

        # Shamelessly stolen from gw_data_find
        print "Querying LDR server for data location."
        try:
            server, port = os.environ["LIGO_DATAFIND_SERVER"].split(":")
        except ValueError:
            sys.exit("Invalid LIGO_DATAFIND_SERVER environment variable set")
        print "Server is %s:%s" % (server, port)

        try:
            frame_type = cfg.get("instrument", "frame_type")
        except ConfigParser.NoOptionError:
            sys.exit(
                "Invalid cache location, and no frame type set, so I can't query LDR for the file locations."
            )
        if frame_type == "":
            sys.exit("No frame type set, aborting.")

        print "Frame type is %s" % frame_type
        connection = datafind.GWDataFindHTTPConnection(host=server, port=port)
        print "Equivalent command line is "
        # FIXME: Multiple instruments?
        inst = gw_data_source_opts.channel_dict.keys()[0]
        print "gw_data_find -o %s -s %d -e %d -u file -t %s" % (
            inst[0], gw_data_source_opts.seg[0], gw_data_source_opts.seg[1],
            frame_type)
        cache = connection.find_frame_urls(inst[0],
                                           frame_type,
                                           gw_data_source_opts.seg[0],
                                           gw_data_source_opts.seg[1],
                                           urltype="file",
                                           on_gaps="error")

        tmpfile, tmpname = tempfile.mkstemp()
        print "Writing cache of %d files to %s" % (len(cache), tmpname)
        with open(tmpname, "w") as tmpfile:
            cache.tofile(tmpfile)
        connection.close()
        gw_data_source_opts.frame_cache = tmpname

    handler = EPHandler(mainloop, pipeline)

    # Enable the periodic output of trigger statistics
    if options.channel_monitoring:
        handler.channel_monitoring = True

    # If a sample rate other than the native rate is requested, we'll need to
    # keep track of it
    if options.sample_rate is not None:
        handler.rate = options.sample_rate

    # Does the user want a cache file to track the trigger files we spit out?
    # And if so, if you give us a name, we'll update it every time we output,
    # else only at the end of the run
    if options.file_cache_name is not None:
        handler.output_cache_name = options.file_cache_name

    # Clustering on/off
    handler.clustering = options.clustering
    # Be verbose?
    handler.verbose = options.verbose

    # Instruments and channels
    # FIXME: Multiple instruments
    if len(gw_data_source_opts.channel_dict.keys()) == 1:
        handler.inst = gw_data_source_opts.channel_dict.keys()[0]
    else:
        sys.exit("Unable to determine instrument.")

    # FIXME: Multiple instruments
    if gw_data_source_opts.channel_dict[handler.inst] is not None:
        handler.channel = gw_data_source_opts.channel_dict[handler.inst]
    else:
        # TODO: In the future, we may request multiple channels for the same
        # instrument -- e.g. from a single raw frame
        sys.exit("Unable to determine channel.")
    print "Channel name(s): " + handler.channel

    # FFT and time-frequency parameters
    # Low frequency cut off -- filter bank begins here
    handler.flow = cfg.getfloat("tf_parameters", "min-frequency")
    # High frequency cut off -- filter bank ends here
    handler.fhigh = cfg.getfloat("tf_parameters", "max-frequency")
    # Frequency resolution of the finest filters
    handler.base_band = cfg.getfloat("tf_parameters", "min-bandwidth")
    # Tile duration should not exceed this value
    handler.max_duration = cfg.getfloat("tf_parameters", "max-duration")
    # Number of resolutions levels. Can't be less than 1, and can't be greater
    # than log_2((fhigh-flow)/base_band)
    handler.max_bandwidth = cfg.getfloat("tf_parameters", "max-bandwidth")
    handler.max_level = int(
        math.floor(math.log(handler.max_bandwidth / handler.base_band, 2))) + 1
    # Frequency band overlap -- in our case, handler uses 1 - frequency overlap
    if options.frequency_overlap > 1 or options.frequency_overlap < 0:
        sys.exit("Frequency overlap must be between 0 and 1.")
    handler.frequency_overlap = options.frequency_overlap

    # DOF options -- this affects which tile types will be calculated
    if cfg.has_option("tf_parameters", "max-dof"):
        handler.max_dof = cfg.getint("tf_parameters", "max-dof")
    if cfg.has_option("tf_parameters", "fix-dof"):
        handler.fix_dof = cfg.getint("tf_parameters", "fix-dof")

    if cfg.has_option("tf_parameters", "fft-length"):
        handler.fft_length = cfg.getfloat("tf_parameters", "fft-length")

    if cfg.has_option("cache", "cache-psd-every"):
        handler.cache_psd = cfg.getint("cache", "cache-psd-every")
        print "PSD caching enabled. PSD will be recorded every %d seconds" % handler.cache_psd
    else:
        handler.cache_psd = None

    if cfg.has_option("cache", "cache-psd-dir"):
        handler.cache_psd_dir = cfg.get("cache", "cache-psd-dir")
        print "Caching PSD to %s" % handler.cache_psd_dir

    # Used to keep track if we need to lock the PSD into the whitener
    psdfile = None
    if cfg.has_option("cache", "reference-psd"):
        psdfile = cfg.get("cache", "reference-psd")
        try:
            handler.psd = lal.series.read_psd_xmldoc(
                ligolw_utils.load_filename(
                    psdfile,
                    contenthandler=lal.series.PSDContentHandler))[handler.inst]
            print "Reference PSD for instrument %s from file %s loaded" % (
                handler.inst, psdfile)
            # Reference PSD disables caching (since we already have it)
            handler.cache_psd = None
            handler.psd_mode = 1
        except KeyError:  # Make sure we have a PSD for this instrument
            sys.exit(
                "PSD for instrument %s requested, but not found in file %s. Available instruments are %s"
                % (handler.inst, psdfile, str(handler.psd.keys())))

    # Triggering options
    if cfg.has_option("triggering", "output-file-stride"):
        handler.dump_frequency = cfg.getint("triggering", "output-file-stride")
    if cfg.has_option("triggering", "output-directory"):
        handler.outdir = cfg.get("triggering", "output-directory")
    if cfg.has_option("triggering", "output-dir-format"):
        handler.outdirfmt = cfg.get("triggering", "output-dir-format")

    handler.output = not options.disable_triggers

    # FAP thresh overrides SNR thresh, because multiple resolutions will have
    # different SNR thresholds, nominally.
    if cfg.has_option("triggering", "snr-thresh"):
        handler.snr_thresh = cfg.getfloat("triggering", "snr-thresh")
    if cfg.has_option("triggering", "fap-thresh"):
        handler.fap = cfg.getfloat("triggering", "fap-thresh")

    if handler.fap is not None:
        print "False alarm probability threshold (in Gaussian noise) is %g" % handler.fap
    if handler.snr_thresh is not None:
        print "Trigger SNR threshold sqrt(E/ndof-1) is %f" % handler.snr_thresh

    # Maximum number of events (+/- a few in the buffer) before which we drop an
    # output file
    if cfg.has_option("triggering", "events_per_file"):
        handler.max_events = cfg.get_int("triggering", "events_per_file")

    return handler
Пример #40
0
        }
        # Optional host/port overrides, applied only when the option exists
        # AND is non-empty in the [database] section.
        if config.has_option('database', 'ecm_host') and config.get('database', 'ecm_host'):
            db_config['HOST'] = config.get('database', 'ecm_host')
        if config.has_option('database', 'ecm_port') and config.get('database', 'ecm_port'):
            db_config['PORT'] = config.get('database', 'ecm_port')

        return db_config

DATABASES = { # see http://docs.djangoproject.com/en/1.3/ref/settings/#databases
    'default': get_db_config(),
}

SITE_ID = 1

# Local access is always allowed; extra host names come from the config's
# [misc] external_host_name option (whitespace-separated).
ALLOWED_HOSTS = [ '127.0.0.1', 'localhost' ]
if config.has_option('misc', 'external_host_name'):
    ALLOWED_HOSTS += config.get('misc', 'external_host_name').split()


##########
# E-MAIL #
##########
# to enable email error reporting, add tuples in there, ('name', '*****@*****.**')
# Admin addresses come from [email] admin_email (whitespace-separated);
# names are left blank.
ADMINS = [ ('', email) for email in config.get('email', 'admin_email').split() ]

# for development, you can use python dummy smtp server, run this command:
# >>> python -m smtpd -n -c DebuggingServer localhost:25
EMAIL_HOST = config.get('email', 'host')
EMAIL_PORT = config.getint('email', 'port')
EMAIL_USE_TLS = config.getboolean('email', 'use_tls')
EMAIL_HOST_USER = config.get('email', 'host_user')
Пример #41
0
        # Any leading non-flag arguments select an alternate config file;
        # the last one before a '--' flag wins.
        for arg in sys.argv[1:]:
            if arg.startswith("--"):
                break
            print "Usando configuración:", arg
            CONFIG_FILE = arg

        config = SafeConfigParser()
        config.read(CONFIG_FILE)
        # Mandatory settings: WSAA credentials plus WSRemHarina I/O paths.
        CERT = config.get('WSAA', 'CERT')
        PRIVATEKEY = config.get('WSAA', 'PRIVATEKEY')
        CUIT = config.get('WSRemHarina', 'CUIT')
        ENTRADA = config.get('WSRemHarina', 'ENTRADA')
        SALIDA = config.get('WSRemHarina', 'SALIDA')

        # URL overrides apply only when the flag below is false (the name
        # 'H**O' appears censored in this dump -- NOTE(review): presumably a
        # homologation/testing flag defined elsewhere; confirm).  The flag
        # also indexes WSDL for the default service URL.
        if config.has_option('WSAA', 'URL') and not H**O:
            wsaa_url = config.get('WSAA', 'URL')
        else:
            wsaa_url = None
        if config.has_option('WSRemHarina', 'URL') and not H**O:
            wsremharina_url = config.get('WSRemHarina', 'URL')
        else:
            wsremharina_url = WSDL[H**O]

        # Optional [DBF] section with table definitions.
        if config.has_section('DBF'):
            conf_dbf = dict(config.items('DBF'))
            if DEBUG: print "conf_dbf", conf_dbf
        else:
            conf_dbf = {}

        DEBUG = '--debug' in sys.argv
Пример #42
0
# Echo the run configuration collected earlier in the script (off, dataPath,
# modelPath, participantList, trainName and mode are defined above this
# fragment).
print 'Training Settings:'
print
print 'Data Path: '.ljust(off), dataPath
print 'Model Path: '.ljust(off), modelPath
print 'Participants: '.ljust(off), participantList
print 'Model Root Name: '.ljust(off), trainName
print 'Training Mode:'.ljust(off), mode
print '-------------------'
print 'Loading Parameters...'
print

# Read the per-model update mode from config.ini, defaulting to 'update'.
# NOTE(review): parser.read() does not raise IOError for a missing file, so
# this except clause is likely dead; and if anything did raise before the
# assignment, modeConfig would be undefined at the test below (NameError).
# Confirm intent.
try:
    parser = SafeConfigParser()
    found = parser.read(dataPath + "/config.ini")

    if (parser.has_option(trainName, 'update_mode')):
        modeConfig = parser.get(trainName, 'update_mode')
    else:
        modeConfig = 'update'
except IOError:
    pass

# Train from scratch when explicitly requested, configured, or when the
# model path does not point at an existing pickle.
if (mode == 'new' or modeConfig == 'new'
        or '.pickle' not in modelPath):  #or update but no .pickle
    print 'Loading training parameters from: \n ', '\t' + dataPath + "/config.ini"
    try:
        parser = SafeConfigParser()
        found = parser.read(dataPath + "/config.ini")

        #load parameters from config file
        if (parser.has_option(trainName, 'experiment_number')):
Пример #43
0
def makeGraph(plotvalues, ytitle, legendnames, cmslabel):
    """Draw one ROOT TGraph per entry of *plotvalues* on a shared canvas.

    Each entry of plotvalues is an (xlabels, xvalues, yvalues) triple; the
    labels of the last curve provide the x-axis tick labels.  legendnames
    supplies one legend entry per curve, and cmslabel is forwarded to
    cmstext().  Blocks on raw_input() so the interactive canvas stays open.
    """
    style_cfg = SafeConfigParser()
    logging.debug("Loading style config")
    style_cfg.read("configs/style.cfg")

    import ROOT

    # One marker/line colour per curve, in order.
    palette = [ROOT.kRed, ROOT.kBlue, ROOT.kGreen + 2, ROOT.kViolet]

    curves = []
    n_points = 0
    axis_labels = None
    for idx, (labels, xs, ys) in enumerate(plotvalues):
        axis_labels = labels
        gr = ROOT.TGraph(len(xs), xs, ys)
        n_points = len(xs)
        gr.SetLineColor(palette[idx])
        gr.SetLineWidth(2)
        gr.SetMarkerColor(palette[idx])
        gr.SetMarkerSize(1)
        gr.SetMarkerStyle(21)
        curves.append(gr)

    canvas = ROOT.TCanvas("c1", "c1", 800, 640)
    canvas.SetTopMargin(0.05)
    canvas.SetRightMargin(0.05)
    canvas.SetBottomMargin(0.15)
    canvas.cd()
    canvas.SetGrid()

    legend = ROOT.TLegend(0.75, 0.45, 0.95, 0.7)
    legend.SetBorderSize(0)
    legend.SetFillStyle(0)
    legend.SetTextFont(42)
    for idx, gr in enumerate(curves):
        logging.debug("Adding graph {0}".format(idx))
        if idx == 0:
            # The first curve defines the axes, ranges and tick labels.
            gr.Draw("AP")
            hist = gr.GetHistogram()
            hist.SetMinimum(0.0)
            hist.SetMaximum(hist.GetMaximum() * 1.1)
            gr.GetXaxis().SetLimits(-1, n_points)
            gr.GetXaxis().SetLabelOffset(0.04)
            gr.GetXaxis().SetNdivisions(-(n_points + 1))
            gr.SetTitle("")
            # Optional pretty-printing of the y-axis title from the style
            # config's [Renaming] section.
            if style_cfg.has_option("Renaming", ytitle):
                ytitle = style_cfg.get("Renaming", ytitle)
            gr.GetYaxis().SetTitle(ytitle)
            gr.GetYaxis().SetTitleOffset(gr.GetYaxis().GetTitleOffset() * 2)
            xax = gr.GetXaxis()
            # Pad the label list so its entries line up with the tick
            # divisions configured above.
            axis_labels = ["", " "] + axis_labels + [" "]
            for bin_index in range(n_points + 3):
                xax.ChangeLabel(bin_index, 45, -1, -1, -1, -1,
                                str(axis_labels[bin_index]))
            gr.GetXaxis().SetTitle()
            gr.Draw("AP")
        else:
            gr.Draw("P")
        legend.AddEntry(gr, legendnames[idx], "p")

    # A legend is only useful with more than one curve.
    if len(curves) > 1:
        legend.Draw("same")

    cms_a, cms_b = cmstext(cmslabel)
    cms_a.Draw("same")
    cms_b.Draw("same")

    canvas.Update()
    raw_input("")
Пример #44
0
class Config(object):
    """
    Manages the configuration file
    """
    def __init__(self):
        """Load settings from the ini file, creating it with defaults if
        it does not exist yet."""
        # DEFAULT VALUES
        self._basescript = None
        self.recentvaults = []    # most recently opened vault paths (max 10)
        self.pwlength = 10        # generated password length
        self.reduction = False    # restrict generator to a reduced alphabet
        self.search_notes = False   # include notes field when searching
        self.search_passwd = False  # include password field when searching
        self.alphabet = "abcdefghijklmnopqrstuvwxyz0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ_"

        self._fname = self.get_config_filename()
        self._parser = SafeConfigParser()

        if os.path.exists(self._fname):
            self._parser.read(self._fname)

        if not self._parser.has_section("base"):
            self._parser.add_section("base")

        # Recent vaults are stored as recentvaults0..recentvaults9;
        # stop at the first missing index.
        for num in range(10):
            if (not self._parser.has_option("base",
                                            "recentvaults" + str(num))):
                break
            self.recentvaults.append(
                self._parser.get("base", "recentvaults" + str(num)))

        if self._parser.has_option("base", "alphabet"):
            # BUG FIX: the alphabet is a string of characters (see the
            # default above), not a number; the previous int(...) wrapper
            # raised ValueError for any genuine alphabet value.
            self.alphabet = self._parser.get("base", "alphabet")

        if self._parser.has_option("base", "pwlength"):
            self.pwlength = int(self._parser.get("base", "pwlength"))

        if self._parser.has_option("base", "alphabetreduction"):
            if self._parser.get("base", "alphabetreduction") == "True":
                self.reduction = True

        if self._parser.has_option("base", "search_notes"):
            if self._parser.get("base", "search_notes") == "True":
                self.search_notes = True

        if self._parser.has_option("base", "search_passwd"):
            if self._parser.get("base", "search_passwd") == "True":
                self.search_passwd = True

        # First run: persist the defaults so the file exists next time.
        if not os.path.exists(self._fname):
            self.save()

    def set_basescript(self, basescript):
        self._basescript = basescript

    def get_basescript(self):
        return self._basescript

    def save(self):
        """Write the current settings back to the config file."""
        if (not os.path.exists(os.path.dirname(self._fname))):
            os.mkdir(os.path.dirname(self._fname))

        # remove duplicates and trim to 10 items
        _saved_recentvaults = []
        for item in self.recentvaults:
            if item in _saved_recentvaults:
                continue
            self._parser.set("base",
                             "recentvaults" + str(len(_saved_recentvaults)),
                             item)
            _saved_recentvaults.append(item)
            if (len(_saved_recentvaults) >= 10):
                break

        self._parser.set("base", "pwlength", str(self.pwlength))
        self._parser.set("base", "alphabetreduction", str(self.reduction))
        self._parser.set("base", "search_notes", str(self.search_notes))
        self._parser.set("base", "search_passwd", str(self.search_passwd))
        filehandle = open(self._fname, 'w')
        self._parser.write(filehandle)
        filehandle.close()

    @staticmethod
    def get_config_filename():
        """
        Returns the full filename of the config file
        """
        base_fname = "loxodo"

        # On Mac OS X, config files go to ~/Library/Application Support/foo/
        if platform.system() == "Darwin":
            base_path = os.path.join(os.path.expanduser("~"), "Library",
                                     "Application Support")
            if os.path.isdir(base_path):
                return os.path.join(base_path, base_fname, base_fname + ".ini")

        # On Microsoft Windows, config files go to $APPDATA/foo/
        if platform.system() in ("Windows", "Microsoft"):
            if ("APPDATA" in os.environ):
                base_path = os.environ["APPDATA"]
                if os.path.isdir(base_path):
                    return os.path.join(base_path, base_fname,
                                        base_fname + ".ini")

        # Allow config directory override as per freedesktop.org XDG Base Directory Specification
        if ("XDG_CONFIG_HOME" in os.environ):
            base_path = os.environ["XDG_CONFIG_HOME"]
            if os.path.isdir(base_path):
                return os.path.join(base_path, base_fname, base_fname + ".ini")

        # Default configuration path is ~/.config/foo/
        base_path = os.path.join(os.path.expanduser("~"), ".config")
        if os.path.isdir(base_path):
            return os.path.join(base_path, base_fname, base_fname + ".ini")
        else:
            return os.path.join(os.path.expanduser("~"),
                                "." + base_fname + ".ini")
Пример #45
0
# This job's share of the search band: the full band is divided evenly
# across numJobs jobs and this job covers [fStart, fStart + fBand).
# (fMin, fFullBand, args, cp and sleep are defined earlier in the script.)
fBand = fFullBand / args.numJobs
fStart = fMin + args.jobNum * fBand

# Per-job output file names built from %-style patterns in the ini file,
# filled with (jobNum, numJobs).
toplistPattern = cp.get('filename-patterns','toplist_name')
toplistName = toplistPattern % (args.jobNum,args.numJobs)

logfilePattern = cp.get('filename-patterns','logfile_name')
logfileName = logfilePattern % (args.jobNum,args.numJobs)

# Pass along the arguments from the ini file
program_args = ['--%s=%s' % a for a in cp.items('raw-program-arguments')]

# Add calculated frequency band
program_args += ['--fStart=%.11f' % fStart]
program_args += ['--fBand=%.11f' % fBand]
program_args += ['--toplistFilename=%s' % toplistName]
program_args += ['--logFilename=%s' % logfileName]

# Variable delay to stagger start times of lalapps code
if cp.has_section('program') and cp.has_option('program','delay_secs'):
    sleep(args.jobNum * cp.getfloat('program','delay_secs'))

# Check if program was specified
if cp.has_section('program') and cp.has_option('program','executable'):
    program = cp.get('program','executable')
else:
    program = 'lalapps_pulsar_crosscorr_v2'

# Run the search binary; check_call raises CalledProcessError on failure.
check_call(([program]+program_args))

Пример #46
0
def default_bitcoind_opts(config_file=None):
    """
    Get our default bitcoind options, such as from a config file,
    or from sane defaults.

    config_file: optional path to an INI file with a [bitcoind] section.
        When None (or when the section is missing), fall back to the
        module-level defaults (TESTNET, DEFAULT_BITCOIND_*).

    Returns a dict with bitcoind_user / bitcoind_passwd / bitcoind_server /
    bitcoind_port / bitcoind_use_https; keys whose value could not be
    determined are omitted.  Note: port is returned as a string when read
    from the config file (parser.get is not coerced to int here).
    """

    bitcoind_server = None
    bitcoind_port = None
    bitcoind_user = None
    bitcoind_passwd = None
    bitcoind_use_https = None

    loaded = False

    if config_file is not None:

        parser = SafeConfigParser()
        parser.read(config_file)

        if parser.has_section('bitcoind'):

            if parser.has_option('bitcoind', 'server'):
                bitcoind_server = parser.get('bitcoind', 'server')

            if parser.has_option('bitcoind', 'port'):
                bitcoind_port = parser.get('bitcoind', 'port')

            if parser.has_option('bitcoind', 'user'):
                bitcoind_user = parser.get('bitcoind', 'user')

            if parser.has_option('bitcoind', 'passwd'):
                bitcoind_passwd = parser.get('bitcoind', 'passwd')

            # use_https is a yes/no flag; absent means "no"
            if parser.has_option('bitcoind', 'use_https'):
                use_https = parser.get('bitcoind', 'use_https')
            else:
                use_https = 'no'

            bitcoind_use_https = use_https.lower() in ["yes", "y", "true"]

            loaded = True

    if not loaded:

        # Compiled-in defaults; TESTNET and DEFAULT_BITCOIND_* are
        # module-level globals defined elsewhere in this file.
        if TESTNET:
            bitcoind_server = "localhost"
            bitcoind_port = DEFAULT_BITCOIND_PORT_TESTNET
            bitcoind_user = DEFAULT_BITCOIND_USERNAME
            bitcoind_passwd = DEFAULT_BITCOIND_PASSWD
            bitcoind_use_https = False

        else:
            bitcoind_server = DEFAULT_BITCOIND_SERVER
            bitcoind_port = DEFAULT_BITCOIND_PORT
            bitcoind_user = DEFAULT_BITCOIND_USERNAME
            bitcoind_passwd = DEFAULT_BITCOIND_PASSWD
            bitcoind_use_https = True

    default_bitcoin_opts = {
        "bitcoind_user": bitcoind_user,
        "bitcoind_passwd": bitcoind_passwd,
        "bitcoind_server": bitcoind_server,
        "bitcoind_port": bitcoind_port,
        "bitcoind_use_https": bitcoind_use_https,
    }

    # Strip None's.  Build a new dict instead of deleting keys while
    # iterating .items() -- the original did that, which raises
    # RuntimeError on Python 3.
    return {k: v for (k, v) in default_bitcoin_opts.items() if v is not None}
Пример #47
0
def cli():
    """Command-line entry point: load the checker config, set up logging,
    then parse every HTTPS-Everywhere ruleset XML file, collecting rulesets
    into a RuleTrie and logging coverage/non-match-group/test-formatting
    problems.

    NOTE(review): this function appears truncated in this excerpt -- the
    loop over xmlFnames is the last visible code.
    """
    parser = argparse.ArgumentParser(
        description='Check HTTPs rules for validity')
    parser.add_argument('checker_config',
                        help='an integer for the accumulator')
    parser.add_argument('rule_files',
                        nargs="*",
                        default=[],
                        help="Specific XML rule files")
    parser.add_argument('--json_file',
                        default=None,
                        help='write results in json file')
    args = parser.parse_args()

    config = SafeConfigParser()
    config.read(args.checker_config)

    # "-" means log to stderr instead of a file.
    logfile = config.get("log", "logfile")
    loglevel = convertLoglevel(config.get("log", "loglevel"))
    if logfile == "-":
        logging.basicConfig(stream=sys.stderr,
                            level=loglevel,
                            format="%(levelname)s %(message)s")
    else:
        logging.basicConfig(
            filename=logfile,
            level=loglevel,
            format=
            "%(asctime)s %(levelname)s %(message)s [%(pathname)s:%(lineno)d]")

    # Optional [rulesets] flags; each defaults to False when absent.
    autoDisable = False
    if config.has_option("rulesets", "auto_disable"):
        autoDisable = config.getboolean("rulesets", "auto_disable")
    # Test rules even if they have default_off=...
    includeDefaultOff = False
    if config.has_option("rulesets", "include_default_off"):
        includeDefaultOff = config.getboolean("rulesets",
                                              "include_default_off")
    ruledir = config.get("rulesets", "rulesdir")
    checkCoverage = False
    if config.has_option("rulesets", "check_coverage"):
        checkCoverage = config.getboolean("rulesets", "check_coverage")
    checkNonmatchGroups = False
    if config.has_option("rulesets", "check_nonmatch_groups"):
        checkNonmatchGroups = config.getboolean("rulesets",
                                                "check_nonmatch_groups")
    checkTestFormatting = False
    if config.has_option("rulesets", "check_test_formatting"):
        checkTestFormatting = config.getboolean("rulesets",
                                                "check_test_formatting")
    certdir = config.get("certificates", "basedir")
    # The skiplist file contains "<hex-hash> <name>" lines; skipdict is a
    # module-level global consulted by skipFile() below.
    if config.has_option("rulesets", "skiplist"):
        skiplist = config.get("rulesets", "skiplist")
        with open(skiplist) as f:
            for line in f:
                fileHash = line.split(" ")[0]
                skipdict[binascii.unhexlify(fileHash)] = 1

    threadCount = config.getint("http", "threads")
    httpEnabled = True
    if config.has_option("http", "enabled"):
        httpEnabled = config.getboolean("http", "enabled")

    #get all platform dirs, make sure "default" is among them
    certdirFiles = glob.glob(os.path.join(certdir, "*"))
    havePlatforms = set([
        os.path.basename(fname) for fname in certdirFiles
        if os.path.isdir(fname)
    ])
    logging.debug("Loaded certificate platforms: %s", ",".join(havePlatforms))
    if "default" not in havePlatforms:
        raise RuntimeError(
            "Platform 'default' is missing from certificate directories")

    metricName = config.get("thresholds", "metric")
    thresholdDistance = config.getfloat("thresholds", "max_distance")
    metricClass = getMetricClass(metricName)
    metric = metricClass()

    # Debugging options, graphviz dump
    dumpGraphvizTrie = False
    if config.has_option("debug", "dump_graphviz_trie"):
        dumpGraphvizTrie = config.getboolean("debug", "dump_graphviz_trie")
    if dumpGraphvizTrie:
        graphvizFile = config.get("debug", "graphviz_file")
        exitAfterDump = config.getboolean("debug", "exit_after_dump")

    # Explicit rule files on the command line override the rules directory.
    if args.rule_files:
        xmlFnames = args.rule_files
    else:
        xmlFnames = glob.glob(os.path.join(ruledir, "*.xml"))
    trie = RuleTrie()

    rulesets = []
    coverageProblemsExist = False
    nonmatchGroupProblemsExist = False
    testFormattingProblemsExist = False
    for xmlFname in xmlFnames:
        logging.debug("Parsing %s", xmlFname)
        if skipFile(xmlFname):
            logging.debug("Skipping rule file '%s', matches skiplist." %
                          xmlFname)
            continue

        # NOTE(review): Python 2 syntax (`except Exception, e`, `file()`).
        # If parsing raises, there is no `continue`, so the loop falls
        # through and reuses the *previous* iteration's `ruleset` (or hits
        # NameError on the first file).  A `continue` in the except branch
        # looks intended -- confirm against upstream.
        try:
            ruleset = Ruleset(etree.parse(file(xmlFname)).getroot(), xmlFname)
        except Exception, e:
            logging.error("Exception parsing %s: %s" % (xmlFname, e))
        if ruleset.defaultOff and not includeDefaultOff:
            logging.debug("Skipping rule '%s', reason: %s", ruleset.name,
                          ruleset.defaultOff)
            continue
        # Check whether ruleset coverage by tests was sufficient.
        if checkCoverage:
            logging.debug("Checking coverage for '%s'." % ruleset.name)
            problems = ruleset.getCoverageProblems()
            for problem in problems:
                coverageProblemsExist = True
                logging.error(problem)
        if checkNonmatchGroups:
            logging.debug("Checking non-match groups for '%s'." % ruleset.name)
            problems = ruleset.getNonmatchGroupProblems()
            for problem in problems:
                nonmatchGroupProblemsExist = True
                logging.error(problem)
        if checkTestFormatting:
            logging.debug("Checking test formatting for '%s'." % ruleset.name)
            problems = ruleset.getTestFormattingProblems()
            for problem in problems:
                testFormattingProblemsExist = True
                logging.error(problem)
        trie.addRuleset(ruleset)
        rulesets.append(ruleset)
Пример #48
0
def default_bitcoind_utxo_opts(config_file=None):
    """
   Get our default bitcoind UTXO options from a config file.
   """

    if config_file is None:
        config_file = virtualchain.get_config_filename()

    parser = SafeConfigParser()
    parser.read(config_file)

    bitcoind_utxo_opts = {}

    server = None
    port = None
    rpc_username = None
    rpc_password = None
    use_https = None
    version_byte = None

    if parser.has_section("bitcoind_utxo"):

        if parser.has_option("bitcoind_utxo", "server"):
            server = parser.get("bitcoind_utxo", "server")

        if parser.has_option("bitcoind_utxo", "port"):
            port = int(parser.get("bitcoind_utxo", "port"))

        if parser.has_option("bitcoind_utxo", "rpc_username"):
            rpc_username = parser.get("bitcoind_utxo", "rpc_username")

        if parser.has_option("bitcoind_utxo", "rpc_password"):
            rpc_password = parser.get("bitcoind_utxo", "rpc_password")

        if parser.has_option("bitcoind_utxo", "use_https"):

            if parser.get("bitcoind_utxo",
                          "use_https").lower() in ["y", "yes", "true"]:
                use_https = True
            else:
                use_https = False

        if parser.has_option("bitcoind_utxo", "version_byte"):
            version_byte = int(parser.get("bitcoind_utxo", "version_byte"))

    if use_https is None:
        use_https = True

    if version_byte is None:
        version_byte = 0

    if server is None:
        server = '127.0.0.1'

    if port is None:
        port = 8332

    bitcoind_utxo_opts = {
        "utxo_provider": "bitcoind_utxo",
        "rpc_username": rpc_username,
        "rpc_password": rpc_password,
        "server": server,
        "port": port,
        "use_https": use_https,
        "version_byte": version_byte
    }

    # strip Nones
    for (k, v) in bitcoind_utxo_opts.items():
        if v is None:
            del bitcoind_utxo_opts[k]

    return bitcoind_utxo_opts
Пример #49
0
def parseExptConfig(configFile, librariesToSublibrariesDict):
    """Parse a screen-analysis experiment config file into a parameter dict.

    configFile: path to the INI experiment config.
    librariesToSublibrariesDict: mapping of lowercase library name ->
        collection of valid sublibrary names.

    Returns (paramDict, exitStatus, warningString).  exitStatus is 0 on
    success and is incremented once per fatal problem; warningString
    accumulates human-readable messages for both warnings and errors.

    NOTE(review): written for Python 2 -- several spots subscript the
    result of zip() directly (zip(...)[0]), which fails on Python 3.
    """
    parser = SafeConfigParser()
    results = parser.read(configFile)
    if len(results) == 0:
        return None, 1, 'Experiment config file not found'

    #output variables
    paramDict = dict()
    exitStatus = 0
    warningString = ''

    ##check all sections
    expectedSections = set([
        'experiment_settings', 'library_settings', 'counts_files',
        'filter_settings', 'sgrna_analysis', 'growth_values', 'gene_analysis'
    ])

    parsedSections = set(parser.sections())

    # Reject the file if it has extra sections or is missing any expected one.
    if len(expectedSections) != len(parsedSections) and len(
            expectedSections) != len(
                expectedSections.intersection(parsedSections)):
        return paramDict, 1, 'Config file does not have all required sections or has extraneous sections!\nExpected:' + ','.join(
            expectedSections) + '\nFound:' + ','.join(parsedSections)

    ##experiment settings
    if parser.has_option('experiment_settings', 'output_folder'):
        paramDict['output_folder'] = parser.get(
            'experiment_settings',
            'output_folder')  #ways to check this is a valid path?
    else:
        warningString += 'No output folder specified, defaulting to current directory\n.'
        # NOTE(review): os.curdir is the string '.', not a callable --
        # calling it raises TypeError.  Probably meant os.curdir or
        # os.getcwd().
        paramDict['output_folder'] = os.curdir()

    if parser.has_option('experiment_settings', 'experiment_name'):
        paramDict['experiment_name'] = parser.get('experiment_settings',
                                                  'experiment_name')
    else:
        warningString += 'No experiment name specified, defaulting to \'placeholder_expt_name\'\n.'
        paramDict['experiment_name'] = 'placeholder_expt_name'

    ##library settings
    libraryDict = librariesToSublibrariesDict
    if parser.has_option('library_settings', 'library'):
        parsedLibrary = parser.get('library_settings', 'library')

        # Library names are matched case-insensitively.
        if parsedLibrary.lower() in libraryDict:
            paramDict['library'] = parsedLibrary.lower()
        else:
            warningString += 'Library name \"%s\" not recognized\n' % parsedLibrary
            exitStatus += 1

    else:
        warningString += 'No library specified\n'
        exitStatus += 1
        parsedLibrary = ''

    # Sublibraries only make sense once a valid library was recognized.
    if 'library' in paramDict:
        if parser.has_option('library_settings', 'sublibraries'):
            parsedSubList = parser.get('library_settings',
                                       'sublibraries').strip().split('\n')

            paramDict['sublibraries'] = []

            for sub in parsedSubList:
                sub = sub.lower()
                if sub in libraryDict[paramDict['library']]:
                    paramDict['sublibraries'].append(sub)

                else:
                    warningString += 'Sublibrary %s not recognized\n' % sub

        else:
            # No sublibraries listed: use every sublibrary of the library.
            paramDict['sublibraries'] = libraryDict[paramDict['library']]

    ##counts files
    # Each line has the form "path:condition|replicate".
    if parser.has_option('counts_files', 'counts_file_string'):
        countsFileString = parser.get('counts_files',
                                      'counts_file_string').strip()

        paramDict['counts_file_list'] = []

        for stringLine in countsFileString.split('\n'):
            stringLine = stringLine.strip()

            if len(stringLine.split(':')) != 2 or len(
                    stringLine.split('|')) != 2:
                warningString += 'counts file entry could not be parsed: ' + stringLine + '\n'
                exitStatus += 1

            else:
                parsedPath = stringLine.split(':')[0]

                # A missing file is flagged but the entry is still recorded.
                if os.path.isfile(parsedPath) == False:
                    warningString += 'Counts file not found: ' + parsedPath + '\n'
                    exitStatus += 1

                condition, replicate = stringLine.split(':')[1].split('|')

                paramDict['counts_file_list'].append(
                    (condition, replicate, parsedPath))

    else:
        warningString += 'No counts files entered\n'
        exitStatus += 1

    ##filter settings
    filterOptions = ['either', 'both']
    if parser.has_option('filter_settings', 'filter_type') and parser.get(
            'filter_settings', 'filter_type').lower() in filterOptions:
        paramDict['filter_type'] = parser.get('filter_settings',
                                              'filter_type').lower()
    else:
        warningString += 'Filter type not set or not recognized, defaulting to \'either\'\n'
        paramDict['filter_type'] = 'either'

    if parser.has_option('filter_settings', 'minimum_reads'):
        try:
            paramDict['minimum_reads'] = parser.getint('filter_settings',
                                                       'minimum_reads')
        except ValueError:
            warningString += 'Minimum read value not an integer, defaulting to 0\n'  #recommended value is 50 but seems arbitrary to default to that
            paramDict['minimum_reads'] = 0
    else:
        warningString += 'Minimum read value not found, defaulting to 0\n'  #recommended value is 50 but seems arbitrary to default to that
        paramDict['minimum_reads'] = 0

    ##sgRNA Analysis
    # Each line has the form "phenotype:condition1:condition2".
    if parser.has_option('sgrna_analysis', 'condition_string'):
        conditionString = parser.get('sgrna_analysis',
                                     'condition_string').strip()

        paramDict['condition_tuples'] = []

        if 'counts_file_list' in paramDict:
            # NOTE(review): Python 2 only -- zip() is not subscriptable in
            # Python 3; would need list(zip(...))[0].
            expectedConditions = set(zip(*paramDict['counts_file_list'])[0])
        else:
            expectedConditions = []

        enteredConditions = set()

        for conditionStringLine in conditionString.split('\n'):
            conditionStringLine = conditionStringLine.strip()

            if len(conditionStringLine.split(':')) != 3:
                warningString += 'Phenotype condition line not understood: ' + conditionStringLine + '\n'
                exitStatus += 1
            else:
                phenotype, condition1, condition2 = conditionStringLine.split(
                    ':')

                if condition1 not in expectedConditions or condition2 not in expectedConditions:
                    warningString += 'One of the conditions entered does not correspond to a counts file: ' + conditionStringLine + '\n'
                    exitStatus += 1
                else:
                    paramDict['condition_tuples'].append(
                        (phenotype, condition1, condition2))
                    enteredConditions.add(condition1)
                    enteredConditions.add(condition2)

        if len(paramDict['condition_tuples']) == 0:
            warningString += 'No phenotype score/condition pairs found\n'
            exitStatus += 1

        # Conditions tied to counts files but never used are only a warning.
        unusedConditions = list(expectedConditions - enteredConditions)
        if len(unusedConditions) > 0:
            warningString += 'Some conditions assigned to counts files will not be incorporated in sgRNA analysis:\n' \
                + ','.join(unusedConditions) + '\n'

    else:
        warningString += 'No phenotype score/condition pairs entered\n'
        exitStatus += 1

    pseudocountOptions = ['zeros only', 'all values', 'filter out']
    if parser.has_option(
            'sgrna_analysis', 'pseudocount_behavior') and parser.get(
                'sgrna_analysis',
                'pseudocount_behavior').lower() in pseudocountOptions:
        paramDict['pseudocount_behavior'] = parser.get(
            'sgrna_analysis', 'pseudocount_behavior').lower()
    else:
        warningString += 'Pseudocount behavior not set or not recognized, defaulting to \'zeros only\'\n'
        paramDict['pseudocount_behavior'] = 'zeros only'

    if parser.has_option('sgrna_analysis', 'pseudocount'):
        try:
            paramDict['pseudocount'] = parser.getfloat('sgrna_analysis',
                                                       'pseudocount')
        except ValueError:
            warningString += 'Pseudocount value not an number, defaulting to 0.1\n'
            paramDict['pseudocount'] = 0.1
    else:
        warningString += 'Pseudocount value not found, defaulting to 0.1\n'
        paramDict['pseudocount'] = 0.1

    ##Growth Values
    # Each line has the form "comparison:replicate:growth_value", and every
    # (comparison, replicate) pair implied by the sgRNA analysis must appear
    # exactly once.
    if parser.has_option('growth_values', 'growth_value_string') and len(
            parser.get('growth_values', 'growth_value_string').strip()) != 0:
        growthValueString = parser.get('growth_values',
                                       'growth_value_string').strip()

        if 'condition_tuples' in paramDict and 'counts_file_list' in paramDict:
            # NOTE(review): Python 2-only zip() subscripting again.
            expectedComparisons = set(zip(*paramDict['condition_tuples'])[0])
            expectedReplicates = set(zip(*paramDict['counts_file_list'])[1])

            expectedTupleList = []

            for comp in expectedComparisons:
                for rep in expectedReplicates:
                    expectedTupleList.append((comp, rep))
        else:
            expectedTupleList = []

        enteredTupleList = []
        growthValueTuples = []

        for growthValueLine in growthValueString.split('\n'):
            growthValueLine = growthValueLine.strip()

            linesplit = growthValueLine.split(':')

            if len(linesplit) != 3:
                warningString += 'Growth value line not understood: ' + growthValueLine + '\n'
                exitStatus += 1
                continue

            comparison = linesplit[0]
            replicate = linesplit[1]

            try:
                growthVal = float(linesplit[2])
            except ValueError:
                warningString += 'Growth value not a number: ' + growthValueLine + '\n'
                exitStatus += 1
                continue

            curTup = (comparison, replicate)
            if curTup in expectedTupleList:
                if curTup not in enteredTupleList:
                    enteredTupleList.append(curTup)
                    growthValueTuples.append(
                        (comparison, replicate, growthVal))

                else:
                    warningString += ':'.join(
                        curTup) + ' has multiple growth values entered\n'
                    exitStatus += 1
            else:
                warningString += ':'.join(
                    curTup
                ) + ' was not expected given the specified counts file assignments and sgRNA phenotypes\n'
                exitStatus += 1

        #because we enforced no duplicates or unexpected values these should match up unless there were values not entered
        #require all growth values to be explictly entered if some were
        if len(enteredTupleList) != len(expectedTupleList):
            warningString += 'Growth values were not entered for all expected comparisons/replicates. Expected: ' + \
                ','.join([':'.join(tup) for tup in expectedTupleList]) + '\nEntered: ' + \
                ','.join([':'.join(tup) for tup in enteredTupleList]) + '\n'
            exitStatus += 1
        else:
            paramDict['growth_value_tuples'] = growthValueTuples

    else:
        # No growth values supplied: default every expected pair to 1 so
        # phenotypes reduce to plain log2 enrichments.
        warningString += 'No growth values--all phenotypes will be reported as log2enrichments\n'

        paramDict['growth_value_tuples'] = []

        if 'condition_tuples' in paramDict and 'counts_file_list' in paramDict:
            expectedComparisons = set(zip(*paramDict['condition_tuples'])[0])
            expectedReplicates = set(zip(*paramDict['counts_file_list'])[1])

            for comp in expectedComparisons:
                for rep in expectedReplicates:
                    paramDict['growth_value_tuples'].append((comp, rep, 1))

    ##Gene Analysis
    if parser.has_option('gene_analysis', 'collapse_to_transcripts'):
        try:
            paramDict['collapse_to_transcripts'] = parser.getboolean(
                'gene_analysis', 'collapse_to_transcripts')
        except ValueError:
            warningString += 'Collapse to transcripts entry not a recognized boolean value\n'
            exitStatus += 1
    else:
        paramDict['collapse_to_transcripts'] = True
        warningString += 'Collapse to transcripts defaulting to True\n'

    #pseudogene parameters
    if parser.has_option('gene_analysis', 'generate_pseudogene_dist'):
        paramDict['generate_pseudogene_dist'] = parser.get(
            'gene_analysis', 'generate_pseudogene_dist').lower()

        if paramDict['generate_pseudogene_dist'] not in [
                'auto', 'manual', 'off'
        ]:
            warningString += 'Generate pseudogene dist entry not a recognized option\n'
            exitStatus += 1
    else:
        # NOTE(review): default is boolean False while configured values are
        # the strings 'auto'/'manual'/'off' -- downstream comparisons mix
        # types.  Confirm intended default (likely 'off').
        paramDict['generate_pseudogene_dist'] = False
        warningString += 'Generate pseudogene dist defaulting to False\n'

    # Manual pseudogene generation requires explicit size and count.
    if 'generate_pseudogene_dist' in paramDict and paramDict[
            'generate_pseudogene_dist'] == 'manual':
        if parser.has_option('gene_analysis', 'pseudogene_size'):
            try:
                paramDict['pseudogene_size'] = parser.getint(
                    'gene_analysis', 'pseudogene_size')
            except ValueError:
                warningString += 'Pseudogene size entry not a recognized integer value\n'
                exitStatus += 1
        else:
            warningString += 'No pseudogene size provided\n'
            exitStatus += 1

        if parser.has_option('gene_analysis', 'num_pseudogenes'):
            try:
                paramDict['num_pseudogenes'] = parser.getint(
                    'gene_analysis', 'num_pseudogenes')
            except ValueError:
                warningString += 'Pseudogene number entry not a recognized integer value\n'
                exitStatus += 1
        else:
            warningString += 'No pseudogene size provided\n'

    #list possible analyses in param dict as dictionary with keys = analysis and values = analysis-specific params

    paramDict['analyses'] = dict()

    #analyze by average of best n
    if parser.has_option('gene_analysis', 'calculate_ave'):
        try:
            if parser.getboolean('gene_analysis', 'calculate_ave') == True:
                paramDict['analyses']['calculate_ave'] = []
        except ValueError:
            warningString += 'Calculate ave entry not a recognized boolean value\n'
            exitStatus += 1

        if 'calculate_ave' in paramDict['analyses']:
            if parser.has_option('gene_analysis', 'best_n'):
                try:
                    paramDict['analyses']['calculate_ave'].append(
                        parser.getint('gene_analysis', 'best_n'))
                except ValueError:
                    warningString += 'Best_n entry not a recognized integer value\n'
                    exitStatus += 1
            else:
                warningString += 'No best_n value provided for average analysis function\n'
                exitStatus += 1
    else:
        warningString += 'Best n average analysis not specified, defaulting to False\n'

    #analyze by Mann-Whitney
    if parser.has_option('gene_analysis', 'calculate_mw'):
        try:
            if parser.getboolean('gene_analysis', 'calculate_mw') == True:
                paramDict['analyses']['calculate_mw'] = []
        except ValueError:
            warningString += 'Calculate Mann-Whitney entry not a recognized boolean value\n'
            exitStatus += 1

    #analyze by K-S, skipping for now

    #analyze by nth best sgRNA
    if parser.has_option('gene_analysis', 'calculate_nth'):
        try:
            if parser.getboolean('gene_analysis', 'calculate_nth') == True:
                paramDict['analyses']['calculate_nth'] = []
        except ValueError:
            warningString += 'Calculate best Nth sgRNA entry not a recognized boolean value\n'
            exitStatus += 1

        if 'calculate_nth' in paramDict['analyses']:
            if parser.has_option('gene_analysis', 'nth'):
                try:
                    paramDict['analyses']['calculate_nth'].append(
                        parser.getint('gene_analysis', 'nth'))
                except ValueError:
                    warningString += 'Nth best sgRNA entry not a recognized integer value\n'
                    exitStatus += 1
            else:
                warningString += 'No Nth best value provided for that analysis function\n'
                exitStatus += 1
    else:
        warningString += 'Nth best sgRNA analysis not specified, defaulting to False\n'

    if len(paramDict['analyses']) == 0:
        warningString += 'No analyses selected to compute gene scores\n'  #should this raise exitStatus?

    return paramDict, exitStatus, warningString
Пример #50
0
def default_mock_utxo_opts(config_file=None):
    """
    Get default options for the mock UTXO provider.

    config_file: optional path to an INI file with a [mock_utxo] section.

    Returns a dict with utxo_provider plus any of tx_list, tx_file,
    start_block, difficulty and initial_utxos that were configured
    (None values are omitted), or None if a configured value fails to
    parse.  start_block defaults to the module global FIRST_BLOCK_MAINNET.
    """

    mock_tx_list = None
    mock_tx_file = None
    mock_start_block = FIRST_BLOCK_MAINNET
    mock_difficulty = None
    mock_initial_utxos = None

    if config_file is not None:

        parser = SafeConfigParser()
        parser.read(config_file)

        if parser.has_section("mock_utxo"):

            if parser.has_option('mock_utxo', 'tx_list'):
                # should be a csv of raw transactions
                mock_tx_list = parser.get('mock_utxo', 'tx_list').split(',')

            if parser.has_option('mock_utxo', 'tx_file'):
                # should be a path
                mock_tx_file = parser.get('mock_utxo', 'tx_file')

            # The original parsed 'start_block' twice, identically; once
            # is enough.
            if parser.has_option('mock_utxo', 'start_block'):
                # should be an int
                try:
                    mock_start_block = int(
                        parser.get('mock_utxo', 'start_block'))
                except ValueError:
                    sys.stderr.write(
                        "Invalid 'start_block' value: expected int\n")
                    return None

            if parser.has_option('mock_utxo', 'difficulty'):
                # should be a float
                try:
                    mock_difficulty = float(
                        parser.get('mock_utxo', 'difficulty'))
                except ValueError:
                    sys.stderr.write(
                        "Invalid 'difficulty' value: expected float\n")
                    return None

            if parser.has_option('mock_utxo', 'initial_utxos'):
                # should be a csv of privatekey:int
                try:
                    wallet_info = parser.get('mock_utxo',
                                             'initial_utxos').split(',')
                    wallets = {}
                    for wi in wallet_info:
                        privkey, value = wi.split(':')
                        wallets[privkey] = int(value)

                    mock_initial_utxos = wallets

                except ValueError:
                    sys.stderr.write(
                        "Invalid 'mock_initial_utxos' value: expected CSV of wif_private_key:int\n")
                    return None

    # The original dict literal listed "start_block" twice; a duplicate key
    # in a literal silently keeps only the last one, so it is listed once.
    default_mock_utxo_opts = {
        "utxo_provider": "mock_utxo",
        "tx_list": mock_tx_list,
        "tx_file": mock_tx_file,
        "start_block": mock_start_block,
        "difficulty": mock_difficulty,
        "initial_utxos": mock_initial_utxos,
    }

    # Strip Nones.  Build a new dict instead of deleting keys while
    # iterating .items() -- the original did that, which raises
    # RuntimeError on Python 3.
    return {k: v for (k, v) in default_mock_utxo_opts.items() if v is not None}
Пример #51
0
class Viewer(object):
    """Tk/pygame based viewer tool.

    Builds a Tk control window with per-module option frames and, when
    "Run" is pressed, opens a pygame/OpenGL window that renders a cgkit
    scene and forwards pygame input events to the cgkit event manager.
    Python 2 code (print statements, execfile, unicode literals).
    """
    
    def __init__(self):
        # Initialization order matters: the config parser is needed by
        # the module loader, and the Tk widgets use the option-frame
        # classes collected while executing the module files.
        self._init_log()
        self._init_config(['cg2_defaults.conf'])
        self._init_modules()
        self._init_input()
        self._init_tk()
        self._init_gl()
        #self._init_camera()

    def _init_log(self):
        """Configure debug-level logging and create the app logger."""
        logging.basicConfig(level=logging.DEBUG)
        self._log = logging.getLogger("Application")
        self._log.info(u"Starting up...")
    
    def _init_config(self, default_config_files):
        """Read config files and parse the command line.

        default_config_files -- list of config file names always read;
        an extra file may be appended via -c/--conf.  Remaining
        positional arguments are kept in self._modules and executed
        later by _init_modules().
        """
        self._log.info(u"Initializing configuration...")
        config_files = list(default_config_files)
        
        parser = OptionParser()
        # NOTE(review): the help text says cg1_defaults.conf but the
        # default list above uses cg2_defaults.conf — confirm which one
        # is intended.
        parser.add_option('-c', '--conf', dest='config_file', default=None, help=u"config file to use (besides cg1_defaults.conf)")
        (options, args) = parser.parse_args()
        
        if options.config_file:
            config_files.append(options.config_file)
        
        self._log.info(u"Reading config files %s...", config_files)
        self._config = SafeConfigParser()
        self._config.read(config_files)

        #scene = getScene()
        #scene.setGlobal("stereo", self.options.stereo)
        #scene.setGlobal("polygonmode", self.options.polygon_mode)
        #scene.setGlobal("navigationmode", self.options.navigation_mode)
        #self.separate_specular_color = False
        #self.draw_orientation = True

        self._modules = args
        
        # Deeply nested scene graphs can exceed the default limit.
        sys.setrecursionlimit(4000)
        self._log.info(u"Recursion limit is now %d." % sys.getrecursionlimit())

    def _init_modules(self):
        """Execute the module scripts given on the command line.

        Each file is exec'd into a shared namespace that already holds
        the cgkit scene; the scripts are expected to define an
        'option_frames' sequence of Tk frame classes (a KeyError is
        raised here otherwise).
        """
        self._log.info(u"Initializing modules...")
        self._scene_globals = {'scene' : cgkit.scene.getScene()}
        for filename in self._modules:
            execfile(filename, self._scene_globals)
        self.tk_option_frame_classes = list(self._scene_globals['option_frames'])

    def _init_input(self):
        """Build the pygame key-code -> cgkit KEY_* constant map."""
        self._log.info(u"Initializing input...")
        # Keys not listed here fall through unchanged (see the
        # self.keydict.get(e.key, e.key) lookups in handleEvents).
        self.keydict = {
              8  : KEY_BACK,
              9  : KEY_TAB,
             13 : KEY_RETURN,
             27 : KEY_ESCAPE,
             32 : KEY_SPACE,
            276 : KEY_LEFT,
            273 : KEY_UP,
            275 : KEY_RIGHT,
            274 : KEY_DOWN,
            301 : KEY_CAPSLOCK,
            304 : KEY_SHIFT_LEFT,
            303 : KEY_SHIFT_RIGHT,
            306 : KEY_CONTROL_LEFT,
            305 : KEY_CONTROL_RIGHT,
            308 : KEY_ALT_LEFT,
            307 : KEY_ALT_RIGHT,
            310 : KEY_WINDOWS_LEFT,
            309 : KEY_WINDOWS_RIGHT,
            319 : KEY_WINDOWS_MENU,
            317 : KEY_PRINT,
            302 : KEY_SCROLL,
             19 : KEY_PAUSE,
            277 : KEY_INSERT,
            127 : KEY_DELETE,
            278 : KEY_HOME,
            279 : KEY_END,
            280 : KEY_PRIOR,
            281 : KEY_NEXT,
            282 : KEY_F1,
            283 : KEY_F2,
            284 : KEY_F3,
            285 : KEY_F4,
            286 : KEY_F5,
            287 : KEY_F6,
            288 : KEY_F7,
            289 : KEY_F8,
            290 : KEY_F9,
            291 : KEY_F10,
            292 : KEY_F11,
            293 : KEY_F12,
            300 : KEY_NUMLOCK,
            256 : KEY_NUMPAD0,
            257 : KEY_NUMPAD1,
            258 : KEY_NUMPAD2,
            259 : KEY_NUMPAD3,
            260 : KEY_NUMPAD4,
            261 : KEY_NUMPAD5,
            262 : KEY_NUMPAD6,
            263 : KEY_NUMPAD7,
            264 : KEY_NUMPAD8,
            265 : KEY_NUMPAD9,
            266 : KEY_NUMPAD_DECIMAL,
            267 : KEY_NUMPAD_DIVIDE,
            268 : KEY_NUMPAD_MULTIPLY,
            269 : KEY_NUMPAD_SUBTRACT,
            270 : KEY_NUMPAD_ADD,
            271 : KEY_NUMPAD_ENTER
            }

    def _init_tk(self):
        """Create the Tk control window: a Run button plus one option
        frame per collected option-frame class (GlobalOptionsFrame is
        always prepended)."""
        self._log.info(u"Initializing tk widgets...")
        self.tk_root = Tk()
        self.tk_option_frames = []
        self.tk_run_button = Button(self.tk_root, text="Run", command=self.run_gl)
        self.tk_run_button.grid(row=0, column=0, sticky=W+E)
        self.tk_option_frame_classes = [GlobalOptionsFrame, ] + getattr(self, 'tk_option_frame_classes', [])
        for index, option_frame_class in enumerate(self.tk_option_frame_classes):
            option_frame = option_frame_class(self.tk_root)
            option_frame.grid(row=index+1, column=0, sticky=W+E)
            self.tk_option_frames.append(option_frame)

    def _init_gl(self):
        """Initialize pygame and create the cgkit GL renderer."""
        self._log.info(u"Initializing opengl renderer...")
        # pygame.init() returns (num succeeded, num failed) modules.
        passed, failed = pygame.init()
        if failed>0:
            self._log.error(u"Warning: %d pygame modules couldn't be initialized" % failed)
        self.gl_renderer = GLRenderInstance()

    def _init_camera(self):
        """Select the scene camera.

        Uses the camera named by config option [scene] camera if set,
        otherwise the first camera found in the world; creates a
        default TargetCamera framing the scene bounding box when none
        exists.  Raises ValueError if a named camera is not found.
        """
        scene = getScene()

        if self._config.has_option('scene', 'camera'):
            cname = self._config.get('scene', 'camera')
        else:
            cname = None
        
        # Search for a camera...
        cam = None
        for obj in scene.walkWorld():
            prots = obj.protocols()
            if ICamera in prots:
                if obj.name==cname or cname==None :
                    cam = obj
                    break

        if cname!=None and cam==None:
            raise ValueError, 'Camera "%s" not found.' % cname

        # No camera? Then create a default camera...
        if cam==None:
            self._log.info(u"No camera set, using a default camera.")
            bbmin, bbmax = scene.boundingBox().getBounds()
            dif = bbmax-bbmin
            b1 = scene.up.ortho()
            b2 = scene.up.cross(b1)
            pos = dif.length()*(0.5*b1+b2) + (bbmax.z+0.5*dif.z)*scene.up
            # Flat scenes: lift the camera along the up vector.
            if abs(dif.z)<0.0001:
                pos += 0.8*dif.length()*scene.up
            cam = TargetCamera(pos = pos,
                               target = 0.5*(bbmin+bbmax)-0.2*(dif.z*scene.up),
                               fov = 50)
        else:
            self._log.info(u"Camera: %s" % cam.name)

        self._camera = cam

    def run(self):
        """Enter the Tk main loop (blocks until the window closes)."""
        self.tk_root.mainloop()

    def run_gl(self):
        """Open the pygame/OpenGL window and start the render loop.

        Scheduled repeatedly via tk_root.after so rendering is driven
        from the Tk event loop rather than a blocking pygame loop.
        """
        # Create a camera control component
        self._init_camera()
        CameraControl(cam=self._camera, mode=1)

        # Get options...
        width = self._config.getint('window', 'width')
        height = self._config.getint('window', 'height')

        # Open a window...
        pygame.display.set_caption("OpenGL viewer")
        flags = OPENGL | DOUBLEBUF
        self.gl_surface = pygame.display.set_mode((width,height), flags)

        # Try to get the native window handle
        # (this only works with pygame 1.6.2 and later)
        try:
            info = pygame.display.get_wm_info()
            hwnd = info["window"]
        except:
            hwnd = None

        # Event loop...
        self._running = True
        self._timer = getScene().timer()
        self._clock = pygame.time.Clock()
        self._cnt = 0
        self._timer.startClock()
        self._fps = self._config.getint('general', 'framerate')

        self.tk_root.after(50, self._loop_once, width, height)
        self._log.info(u"3d visualization running...")
    
    def stop_gl(self):
        """Stop the render loop (no further after() callbacks fire)."""
        self._running = False

    def _loop_once(self, width, height):
        """One frame: draw, pump events, advance the scene timer, and
        re-schedule itself at the configured frame rate."""
        if self._running:
            # Python 2 integer division: 1000/fps milliseconds.
            self.tk_root.after(1000/self._fps, self._loop_once, width, height)

            # Display the scene
            self.draw(self._camera, width, height)
            pygame.display.flip()

            # Handle events
            events = pygame.event.get()
            self.handleEvents(events)

            self._timer.step()

            # Sync
            self._clock.tick(1000/self._fps)

    # handleEvents
    def handleEvents(self, events):
        """Translate pygame events into cgkit event-manager events.

        Mouse coordinates are forwarded both in pixels and normalized
        to [0,1] by the configured window size.
        """
        eventmanager = eventManager()
        width = self._config.getint('window', 'width')
        height = self._config.getint('window', 'height')
        for e in events:
            if e.type==QUIT:
                self._running=False
            # KEYDOWN?
            elif e.type==KEYDOWN:
                # ESC stops the loop but the key press is still
                # dispatched below.
                if e.key==27:
                    self._running=False
                key = e.unicode
                code = self.keydict.get(e.key, e.key)
                mods = self.convertMods(e.mod)
                eventmanager.event(KEY_PRESS, KeyEvent(key, code, mods))
#                keyboard.setKeyValue(e.key, True)
            # KEYUP
            elif e.type==KEYUP:
                code = self.keydict.get(e.key, e.key)
                # KEYUP has no unicode attribute; reconstruct a best
                # effort character (empty string for non-characters).
                try:
                    key = unicode(chr(e.key))
                except:
                    key = u""
                mods = self.convertMods(e.mod)
                eventmanager.event(KEY_RELEASE, KeyEvent(key, code, mods))
#                keyboard.setKeyValue(e.key, False)
            # MOUSEBUTTONDOWN
            elif e.type==MOUSEBUTTONDOWN:
                x,y = e.pos
                x0 = float(x)/width
                y0 = float(y)/height
                # Buttons 4/5 are the wheel (+/-120 per notch).
                if e.button==1:
                    eventname = LEFT_DOWN
                    evt = MouseButtonEvent(e.button, x, y, x0, y0)
                elif e.button==2:
                    eventname = MIDDLE_DOWN
                    evt = MouseButtonEvent(e.button, x, y, x0, y0)
                elif e.button==3:
                    eventname = RIGHT_DOWN
                    evt = MouseButtonEvent(e.button, x, y, x0, y0)
                elif e.button==4:
                    eventname = MOUSE_WHEEL
                    evt = MouseWheelEvent(120, x, y, x0, y0)
                elif e.button==5:
                    eventname = MOUSE_WHEEL
                    evt = MouseWheelEvent(-120, x, y, x0, y0)
                else:
                    eventname = MOUSE_BUTTON_DOWN
                    evt = MouseButtonEvent(e.button, x, y, x0, y0)
                eventmanager.event(eventname, evt)
            # MOUSEBUTTONUP
            elif e.type==MOUSEBUTTONUP:
                x,y = e.pos
                x0 = float(x)/width
                y0 = float(y)/height
                if e.button==1:
                    eventname = LEFT_UP
                    evt = MouseButtonEvent(e.button, x, y, x0, y0)
                elif e.button==2:
                    eventname = MIDDLE_UP
                    evt = MouseButtonEvent(e.button, x, y, x0, y0)
                elif e.button==3:
                    eventname = RIGHT_UP
                    evt = MouseButtonEvent(e.button, x, y, x0, y0)
                elif e.button==4:
                    eventname = MOUSE_WHEEL
                    evt = MouseWheelEvent(120, x, y, x0, y0)
                elif e.button==5:
                    eventname = MOUSE_WHEEL
                    evt = MouseWheelEvent(-120, x, y, x0, y0)
                else:
                    eventname = MOUSE_BUTTON_UP
                    evt = MouseButtonEvent(e.button, x, y, x0, y0)
                eventmanager.event(eventname, evt)
            # MOUSEMOTION
            elif e.type==MOUSEMOTION:
                # Pack the three pygame button states into a bitmask.
                btns = 0
                b1,b2,b3 = e.buttons
                if b1:
                    btns |= 0x1
                if b2:
                    btns |= 0x2
                if b3:
                    btns |= 0x4                
                x,y = e.pos
                dx, dy = e.rel
                x0 = float(x)/width
                y0 = float(y)/height
                dx0 = float(dx)/width
                dy0 = float(dy)/height
                evt = MouseMoveEvent(x, y, dx, dy, x0, y0, dx0, dy0, btns)
                eventmanager.event(MOUSE_MOVE, evt)
            # SYSWMEVENT
            elif e.type==SYSWMEVENT:
                if sys.platform=="win32" and not hasattr(e, "msg") and not pygame.event.get_blocked(SYSWMEVENT):
                    pygame.event.set_blocked(SYSWMEVENT)
                    print "Warning: This version of pygame does not allow processing system events."
                
    def setOptions(self, optparser):
        """Add options specific to this tool."""
        
        # NOTE(review): Tool.setOptions is called with self, but Viewer
        # derives from object here — confirm this method belongs to a
        # Tool subclass variant of this viewer.
        Tool.setOptions(self, optparser)
        optparser.add_option("-F", "--full-screen", action="store_true", default=False,
                             help="Full screen display")
        optparser.add_option("-S", "--stereo", metavar="MODE",
                             help="Activate stereo display (vsplit, glstereo)")
        optparser.add_option("-D", "--eye-distance", type="float", default=0.07,
                             help="Default eye distance for stereo display. Default: 0.07")
        optparser.add_option("-B", "--bounding-box", action="store_true", default=False,
                             help="Show bounding boxes")
        optparser.add_option("-P", "--polygon-mode", metavar="MODE",
                             help="Polygon mode (fill, line, point). Default: fill")
        optparser.add_option("-s", "--save", metavar="NAME",
                             help="Save screenshots as images.")
        optparser.add_option("-N", "--navigation-mode", metavar="MODE",
                             help="Navigation mode (MAX, Maya, Softimage). Default: Maya")
        optparser.add_option("-X", "--disable-spacedevice", action="store_true", default=False,
                             help="Disable SpaceMouse/SpaceBall.")
        optparser.add_option("-T", "--disable-wintab", action="store_true", default=False,
                             help="Disable tablet support.")
        
    def convertMods(self, mods):
        """Convert pygame key modifier flags to cgkit modifier flags.
        """
        res = 0
        # Bit pairs are left/right variants of each modifier
        # (pygame KMOD_* values).
        if mods & 0x0001 or mods & 0x0002:
            res |= KEYMOD_SHIFT
        if mods & 0x0040 or mods & 0x0080:
            res |= KEYMOD_CONTROL
        if mods & 0x0100 or mods & 0x0200:
            res |= KEYMOD_ALT
        return res

    def draw(self, cam, width, height):
        """Render one frame of the scene through camera cam."""
        scene = getScene()
        renderer = self.gl_renderer

        # Set handedness
        renderer.left_handed = scene.handedness=="l"
        renderer.setViewport(0,0,width,height)

        renderer.draw_solid = True
        #renderer.draw_bboxes = self.options.bounding_box
        renderer.draw_coordsys = False
        #renderer.draw_orientation = self.draw_orientation
        renderer.smooth_model = True
        renderer.backface_culling = False
        #renderer.separate_specular_color = self.separate_specular_color
        #renderer.polygon_mode = self.polygon_mode  # 0=Point 1=Line 2=Fill
        #renderer.stereo_mode = self.stereo_mode
        renderer.clearcol = vec4(scene.getGlobal("background", vec4(0.5,0.5,0.6,0)))

        # Set projection matrix
        near, far = cam.getNearFar()
        P = cam.projection(width,height,near,far)
        renderer.setProjection(P)

        # Set view matrix
        renderer.setViewTransformation(cam.viewTransformation(), 0)

        # Draw scene
        root = scene.worldRoot()
        renderer.paint(root)

    # saveScreenshot
    def saveScreenshot(self, srf):
        """Save the current window content.

        srf is the pygame Surface object.  The file name is derived
        from the -s/--save option with the current frame number
        appended (e.g. shot0001.png).
        """
        name,ext = os.path.splitext(self.options.save)
        f = int(round(getScene().timer().frame))
        fname = "%s%04d%s"%(name, f, ext)
        print 'Saving "%s"...'%fname
        data = pygame.image.tostring(srf, "RGB")
        img = Image.fromstring("RGB", (srf.get_width(), srf.get_height()), data)
        img.save(fname)

    # setOptionsFromGlobals
    def setOptionsFromGlobals(self):
        """Override command line options with scene globals and map the
        keyword options (stereo/polygon/navigation mode) to their
        internal integer codes."""
        Tool.setOptionsFromGlobals(self)

        scene = getScene()
        self.options.full_screen = scene.getGlobal("fullscreen", self.options.full_screen)

        self.options.eye_distance = float(scene.getGlobal("eyedistance", self.options.eye_distance))

        # Check the stereo option and initialize the variable "stereo_mode"
        Sopt = scene.getGlobal("stereo", None)
        self.stereo_mode = self.translateKeyWordOpt(Sopt,
                                { None:0, "vsplit":1, "glstereo":2 },
                                "Unknown stereo mode: '%s'")

        # Check the polygon mode option
        Popt = scene.getGlobal("polygonmode", "fill")
        self.polygon_mode = self.translateKeyWordOpt(Popt,
                                { None:2, "point":0, "line":1, "fill":2 },
                                "Unknown polygon mode: '%s'")

        # Check the navigationmode option
        Nopt = scene.getGlobal("navigationmode", "maya")
        self.navigation_mode = self.translateKeyWordOpt(Nopt,
                            { None:1, "max":0, "maya":1, "softimage":2 },
                            "Unknown navigation mode: '%s'")
Пример #52
0
    parser.add_argument('--send_email', type=bool, default=False,
                    help='Should we send email? (Default false)')
    parser.add_argument('--email_to', type=str, default='',
                help='Send email to')
    parser.add_argument('--email_from', type=str, default='',
                help='Who is sending the email?')
    parser.add_argument('--list_vdis', type=bool, default=False,
                help='List volumes VDIs? (Default false)')
    args = parser.parse_args()


    # import keys from cloudmonkey config
    parser = SafeConfigParser()
    parser.read(os.path.expanduser('~/.cloudmonkey/config'))
    if parser.has_section(args.region):
        if not parser.has_option(args.region, 'url'):
            sys.exit("There is no url option in '%s' section" % args.region)
        if not parser.has_option(args.region, 'apikey'):
            sys.exit("There is no 'apikey' option in '%s' section" % args.region)
        if not parser.has_option(args.region, 'secretkey'):
            sys.exit("There is no 'secretkey' option in '%s' section" % args.region)

        apikey = parser.get(args.region, 'apikey')
        api_url = parser.get(args.region, 'url')
        secretkey = parser.get(args.region, 'secretkey')
        db_host = parser.get(args.region, 'db_host')
        db_user = parser.get(args.region, 'db_user')
        db_password = parser.get(args.region, 'db_password')


    else:
Пример #53
0
        exit(1)

    path.append(OPTIONS.twister_path)

    from ConfigParser import SafeConfigParser

    from services.PacketSniffer.PacketSniffer import Sniffer

    # load execution process configuration
    _EP_CONFIG = dict()
    EP_CONFIG = SafeConfigParser()
    EP_CONFIG.read(OPTIONS.twister_path + '/config/epname.ini')
    for s in [
            _s for _s in EP_CONFIG.sections()
            if not _s == 'PACKETSNIFFERPLUGIN' and EP_CONFIG.has_option(
                _s, 'ENABLED') and EP_CONFIG.get(_s, 'ENABLED')
    ]:
        _EP_CONFIG.update([
            (s, {
                'CE_IP': EP_CONFIG.get(s, 'CE_IP'),
                'CE_PORT': EP_CONFIG.get(s, 'CE_PORT')
            }),
        ])

    EP_CONFIG = list(_EP_CONFIG.itervalues())

    # initiate and start SNIFFER
    SNIFFER = Sniffer(user=OPTIONS.user,
                      epConfig=EP_CONFIG,
                      OFPort=OPTIONS.of_port,
                      iface=OPTIONS.eth_interface)
Пример #54
0
from galaxy import eggs

import pkg_resources

pkg_resources.require("sqlalchemy-migrate")

from migrate.versioning.shell import main
from ConfigParser import SafeConfigParser

log = logging.getLogger(__name__)

cp = SafeConfigParser()
cp.read("universe_wsgi.ini")

if cp.has_option("app:main", "database_connection"):
    db_url = cp.get("app:main", "database_connection")
elif cp.has_option("app:main", "database_file"):
    db_url = "sqlite:///%s?isolation_level=IMMEDIATE" % cp.get(
        "app:main", "database_file")
else:
    db_url = None

dialect_to_egg = {
    "sqlite": "pysqlite>=2",
    "postgres": "psycopg2",
    "mysql": "MySQL_python"
}
dialect = (db_url.split(':', 1))[0]
try:
    egg = dialect_to_egg[dialect]
Пример #55
0
#participantList is extracted from number of subdirectories of dataPath
participantList = [f for f in listdir(dataPath) if isdir(join(dataPath, f))]

print '-------------------'
print dataPath
print modelPath
print participantList
print trainName
print mode
print '-------------------'

try:
	parser = SafeConfigParser()
	found = parser.read(dataPath + "/config.ini")

	if(parser.has_option(trainName, 'update_mode')):
		modeConfig = parser.get(trainName, 'update_mode') 
	else:
		modeConfig = 'update'
except IOError:
	pass

if(mode == 'new' or modeConfig == 'new' or '.pickle' not in modelPath): #or update but no .pickle
	print 'loading training parameters from config.ini' 
	try:
		parser = SafeConfigParser()
		found = parser.read(dataPath + "/config.ini")

		#load parameters from config file
		if(parser.has_option(trainName, 'imgH')):
			imgH = int(parser.get(trainName, 'imgH') )
Пример #56
0
  def __init__(self, uid, debug):
    """Locate, parse and validate the experms configuration file.

    uid   -- numeric user id of the running process; root (0) looks
             for /etc/experms.conf, other users for a per-user file
             (~/.experms.conf, ~/experms.conf) or one next to the
             script.
    debug -- if True, print verbose per-option validation messages.

    Fills the parallel per-section lists (dirname, owner, group,
    chmodf, chmodd, excludedir, excludepattern, doit) indexed by
    directory-section number.  Exits the process with status 1 if no
    config file is found or any validation error occurred.
    """
    # variable names from the configfile
    log_activities = 'log_activities'
    restore = 'restore'
    dirname = 'path'
    owner = 'owner'
    group = 'group'
    chmodf = 'chmodf'
    chmodd = 'chmodd'
    excludedir = 'excludepath'
    excludepattern = 'excludepattern'
    
    # default values for the section general
    logitdefault = 'no'
    restoredefault = 'no'
    
    # create the needed lists
    # doit is a per-section action bitmask accumulated below:
    # 1 = chown (owner and/or group set), +2 = chmod files, +4 = chmod dirs
    self.doit = []
    self.dirname = []
    self.owner = []
    self.group = []
    self.chmodf = []
    self.chmodd = []
    self.excludedir = []
    self.excludepattern = []
    
    #prepare the error variable
    # errors are collected so all problems are reported before exiting
    errorsoccured = False
    
    # check for the existence of a config-file
    if uid == 0:
      if os_path.isfile('/etc/experms.conf'):
        configfile = '/etc/experms.conf'
      elif os_path.isfile(sys.path[0] + '/experms.conf'):
        configfile = sys.path[0] + '/experms.conf'
      else:
        print >> sys.stderr, "\033[31;1mError: No configuration-file (/etc/experms.conf) was found.\033[0m"
        sys.exit(1)
    else:
      home = os_path.expanduser("~")
      if os_path.isfile(home + '/.experms.conf'):
        configfile = home + '/.experms.conf'
      elif os_path.isfile(home + '/experms.conf'):
        configfile = home + '/experms.conf'
      elif os_path.isfile(sys.path[0] + '/experms.conf'):
        configfile = sys.path[0] + '/experms.conf'
      else:
        print >> sys.stderr, "\033[31;1mError: No configuration-file (~/experms.conf or ~/.experms.conf) was found.\033[0m"
        sys.exit(1)
    print "Using configuration-file '" + configfile + "'"
    
    # parse the config-file
    # a file without section headers is tolerated (parsed as empty)
    parser = SafeConfigParser()
    try:
      parser.read(configfile)
    except MissingSectionHeaderError:
      pass

    if parser.has_section('general'):
      if debug == True:
        print "section 'general' was found"
      if parser.has_option('general', log_activities):
        self.logit = parser.get('general', log_activities).lower()
        if self.logit == 'yes':
          if debug == True:
            print 'experms will write a log'
        elif self.logit == 'no' or self.logit == '':
          self.logit = logitdefault
          if debug == True:
            print "experms won't write a log"
        else:
          print >> sys.stderr, "\033[31;1mError: 'log_activities' must be either 'yes' or 'no'\033[0m"
          errorsoccured = True
      else:
        self.logit = logitdefault
        if debug == True:
          print "experms won't write a log"
      
      if parser.has_option('general', restore):
        self.restore = parser.get('general', restore).lower()
        if self.restore == 'yes':
          if debug == True:
            print 'experms will restore at start'
        elif self.restore == 'no' or self.restore == '':
          self.restore = restoredefault
          if debug == True:
            print "experms won't restore at start"
        else:
          print >> sys.stderr, "\033[31;1mError: 'restore' must be either 'yes' or 'no'\033[0m"
          errorsoccured = True
      else:
        self.restore = restoredefault
        if debug == True:
          print "experms won't restore at start"
    
      # besides [general] at least one directory section is required
      if len(parser.sections()) < 2:
        print >> sys.stderr, "\033[31;1mError: No directory-section was found.\nIf you have started experms for the first time, please edit the configfile first (usually /etc/experms.conf)\033[0m"
        errorsoccured = True
    else:
      self.restore = restoredefault
      self.logit = logitdefault
      if len(parser.sections()) < 1:
        print >> sys.stderr, "\033[31;1mError: No directory-section was found.\nIf you have started experms for the first time, please edit the configfile first (usually /etc/experms.conf)\033[0m"
        errorsoccured = True
    


    self.sectionname = []
    # can't use enumerate here, to make it possible to mixup the order between general and directory sections in the config
    number = -1
    for i in parser.sections():
      if i == 'general':
        continue
      number = number + 1
      self.sectionname.append(i)
      # usowchmoderr stays True if a section configures no action at all
      usowchmoderr = True
      self.doit.append('')
      
      if parser.has_option(i, dirname):
        self.dirname.append('')
        self.dirname[number] = parser.get(i, dirname).rstrip('/')
        if self.dirname[number] == '':
          print >> sys.stderr, "\033[31;1mError in section", i + ": 'path' is empty.\nIf you have started experms for the first time, please edit the configfile first.\033[0m"
          errorsoccured = True
        else:
          if not os_path.isdir(self.dirname[number]):
            print >> sys.stderr, "\033[31;1mError in section", i + ": 'path'", self.dirname[number], "doesn't exist\033[0m"
            errorsoccured = True
          else:
            if debug == True:
              print "'dirname' in section '" + i + "' is valid"
      else:
        print >> sys.stderr, "\033[31;1mError in section", i + ": 'dirname' is not set.\033[0m"
        errorsoccured = True
      
      # 'owner' may be a numeric uid or a user name; stored as uid,
      # -1 means "leave owner unchanged"
      self.owner.append('')  
      if parser.has_option(i, owner):
        self.owner[number] = parser.get(i, owner)
        if self.owner[number] != '':
          try:
            self.owner[number] = int(self.owner[number])
          except ValueError:
            # not numeric: resolve the user name
            try:
              getpwnam(self.owner[number])
            except KeyError:
              print >> sys.stderr, "\033[31;1mError in section", i + ": User", self.owner[number], "doesn't exist.\033[0m"
              errorsoccured = True
            else:
              # save the user as uid
              self.owner[number] = getpwnam(self.owner[number]).pw_uid
              usowchmoderr = False
              self.doit[number] = 1
              if debug == True:
                print "'user' in section '" + i + "' is valid"
          else:
            # numeric: verify the uid exists
            try:
              getpwuid(self.owner[number])
            except KeyError:
              print >> sys.stderr, "\033[31;1mError in section", i + ": User", self.owner[number], "doesn't exist.\033[0m"
              errorsoccured = True
            else:
              usowchmoderr = False
              self.doit[number] = 1
              if debug == True:
                print "'user' in section '" + i + "' is valid"
            
        else:
          self.owner[number] = -1
          self.doit[number] = 0
      else:
        self.owner[number] = -1
        self.doit[number] = 0
      
      # 'group' mirrors the owner handling; stored as gid, -1 = unchanged
      self.group.append('')
      if parser.has_option(i, group):
        self.group[number] = parser.get(i, group)
        if self.group[number] != '':
          try:
            self.group[number] = int(self.group[number])
          except ValueError:
            try:
              getgrnam(self.group[number])
            except KeyError:
              print >> sys.stderr, "\033[31;1mError in section", i + ": Group", self.group[number], "doesn't exist.\033[0m"
              errorsoccured = True
            else:
              # save the group as gid
              self.group[number] = getgrnam(self.group[number]).gr_gid
              usowchmoderr = False
              self.doit[number] = 1
              if debug == True:
                print "'group' in section '" + i + "' is valid"
          else:
            try:
              getgrgid(self.group[number])
            except KeyError:
              print >> sys.stderr, "\033[31;1mError in section", i + ": Group", self.group[number], "doesn't exist.\033[0m"
              errorsoccured = True
            else:
              usowchmoderr = False
              self.doit[number] = 1
              if debug == True:
                print "'group' in section '" + i + "' is valid"
        else:
          self.group[number] = -1
      else:
        self.group[number] = -1
      
      # file permission bits; 3-digit values are prefixed with '0' and
      # parsed as octal
      self.chmodf.append('')
      if parser.has_option(i, chmodf):
        self.chmodf[number] = parser.get(i, chmodf)
        if self.chmodf[number] != '':
          if checkoctalperms(self.chmodf[number], 'chmodf', i, debug):
            if len(self.chmodf[number]) == 3:
              self.chmodf[number] = '0' + self.chmodf[number]
              self.chmodf[number] = int(self.chmodf[number], 8)
            elif len(self.chmodf[number]) == 4:
              self.chmodf[number] = int(self.chmodf[number], 8)
            usowchmoderr = False
            self.doit[number] = self.doit[number] + 2
          else:
            errorsoccured = True
      
      # directory permission bits, same handling as chmodf
      self.chmodd.append('')  
      if parser.has_option(i, chmodd):
        self.chmodd[number] = parser.get(i, chmodd)
        if self.chmodd[number] != '':
          if checkoctalperms(self.chmodd[number], 'chmodd', i, debug):
            if len(self.chmodd[number]) == 3:
              self.chmodd[number] = '0' + self.chmodd[number]
              self.chmodd[number] = int(self.chmodd[number], 8)
            elif len(self.chmodd[number]) == 4:
              self.chmodd[number] = int(self.chmodd[number], 8)
            usowchmoderr = False 
            self.doit[number] = self.doit[number] + 4 
          else:
            errorsoccured = True
      
      # comma-separated list of paths excluded from processing;
      # normalized to None when empty
      self.excludedir.append([])
      if parser.has_option(i, excludedir):
        exvalid = True
        self.excludedir[number] = parser.get(i, excludedir).split(',')
        for nr, item in enumerate(self.excludedir[number]):
          item = item.strip().rstrip('/')
          self.excludedir[number][nr] = item
          if item == '':
            self.excludedir[number].remove(item)
          else:
            if not os_path.isdir(item) and not os_path.isfile(item):
              print >> sys.stderr, "\033[31;1mError in section", i + ": 'excludedir'", item, "doesn't exist.\033[0m"
              errorsoccured = True
              exvalid = False
        if exvalid == True:
          # NOTE(review): 'item' here is the last loop value, which may
          # not be the entry equal to dirname — confirm the intended
          # path is printed in this message.
          if self.dirname[number] in self.excludedir[number]:
            print >> sys.stderr, "\033[31;1mError in section", i + ": 'excludedir'", item, "is the same like 'dirname'.\033[0m"
            errorsoccured = True
          if debug == True:
            print "'excludedir' in section '" + i + "' is valid"
        if self.excludedir[number] == [] or self.excludedir[number] == ['']:
          self.excludedir[number] = None
      else:
        self.excludedir[number] = None
      
      # optional regular expression for excluding matching names;
      # normalized to None when empty
      self.excludepattern.append('')
      if parser.has_option(i, excludepattern):
        exvalid = True
        self.excludepattern[number] = parser.get(i, excludepattern)
        try:
          re_compile(self.excludepattern[number])
        except:
          # any compile failure means an invalid pattern
          print >> sys.stderr, "\033[31;1mError in section", i + ": 'excludepattern' must be a regular expression.\033[0m"
          errorsoccured = True
        else:
          if debug == True:
            print "'excludepattern' in section '" + i + "' is valid"
        if self.excludepattern[number] == '':
          self.excludepattern[number] = None
        else:
          self.excludepattern[number] = None
      
      if usowchmoderr == True:
        print >> sys.stderr, "\033[31;1mError in section", i + ": With your actual configuration, experms will do exactly nothing.\033[0m"
        errorsoccured = True
    
    # abort only after every section was checked, so all errors show up
    if errorsoccured == True:
      print >> sys.stderr, "\033[31;1mAborting!\033[0m"
      sys.exit(1)
Пример #57
0
        CONFIG_FILE = sys.argv.pop(1)
    if DEBUG: print "CONFIG_FILE:", CONFIG_FILE

    config = SafeConfigParser()
    config.read(CONFIG_FILE)
    #print CONFIG_FILE
    cert = config.get('WSAA', 'CERT')
    privatekey = config.get('WSAA', 'PRIVATEKEY')
    cuit = config.get('WSFEv1', 'CUIT')
    if '/entrada' in sys.argv:
        entrada = sys.argv[sys.argv.index("/entrada") + 1]
    else:
        entrada = config.get('WSFEv1', 'ENTRADA')
    salida = config.get('WSFEv1', 'SALIDA')

    if config.has_option('WSAA', 'URL') and not H**O:
        wsaa_url = config.get('WSAA', 'URL')
    else:
        wsaa_url = None
    if config.has_option('WSFEv1', 'URL') and not H**O:
        wsfev1_url = config.get('WSFEv1', 'URL')
    else:
        wsfev1_url = None

    if config.has_option('WSFEv1', 'REPROCESAR'):
        wsfev1_reprocesar = config.get('WSFEv1', 'REPROCESAR') == 'S'
    else:
        wsfev1_reprocesar = None

    if config.has_option('WSFEv1', 'XML_DIR'):
        wsfev1_xml_dir = config.get('WSFEv1', 'XML_DIR')
Пример #58
0
class GlobalSettings(Signallable):
    """
    Global PiTiVi settings.

    The settings object loads settings from three different sources: the
    global configuration, the local configuration file, and the environment.
    Modules declare which settings they wish to access by calling the
    addConfigOption() class method during initialization.

    @cvar options: A dictionnary of available settings.
    @cvar environment: A list of the controlled environment variables.
    """

    options = {}
    environment = set()
    defaults = {}
    __signals__ = {}

    def __init__(self, **kwargs):
        Signallable.__init__(self)
        self._config = SafeConfigParser()
        # Sources are read in increasing priority: environment overrides the
        # configuration file, which overrides the global configuration.
        self._readSettingsFromGlobalConfiguration()
        self._readSettingsFromConfigurationFile()
        self._readSettingsFromEnvironmentVariables()

    def _readSettingsFromGlobalConfiguration(self):
        # ideally, this should read settings from GConf for ex
        pass

    def _readSettingsFromConfigurationFile(self):
        # This reads the configuration from the user configuration file
        try:
            conf_file_path = os.path.join(xdg_config_home(), "pitivi.conf")
            self._config.read(conf_file_path)
        except ParsingError:
            # Unreadable/broken config: silently fall back to the defaults.
            return

        for (section, attrname, typ, key, env, value) in self.iterAllOptions():
            if not self._config.has_section(section):
                continue
            if key and self._config.has_option(section, key):
                if typ == int or typ == long:
                    try:
                        value = self._config.getint(section, key)
                    except ValueError:
                        # In previous configurations we incorrectly stored
                        # ints using float values.
                        value = int(self._config.getfloat(section, key))
                elif typ == float:
                    value = self._config.getfloat(section, key)
                elif typ == bool:
                    value = self._config.getboolean(section, key)
                else:
                    value = self._config.get(section, key)
                setattr(self, attrname, value)

    @classmethod
    def readSettingSectionFromFile(cls, section):
        """
        Force reading a particular section of the settings file.

        Use this if you dynamically determine settings sections/keys at runtime
        (like in tabsmanager.py). Otherwise, the settings file would be read
        only once (at the initialization phase of your module) and your config
        sections would never be read, and thus values would be reset to defaults
        on every startup because GlobalSettings would think they don't exist.
        """
        # BUGFIX: as a classmethod the first parameter receives the class; the
        # previous (self, cls, section) signature shifted every argument by one
        # so `cls` was actually the section string and all calls broke.
        if cls._config.has_section(section):
            for option in cls._config.options(section):
                # We don't know the value type in advance, just try them all.
                try:
                    value = cls._config.getfloat(section, option)
                except:
                    try:
                        value = cls._config.getint(section, option)
                    except:
                        try:
                            value = cls._config.getboolean(section, option)
                        except:
                            value = cls._config.get(section, option)

                setattr(cls, section + option, value)

    def _readSettingsFromEnvironmentVariables(self):
        # Environment variables have the highest priority.
        for (section, attrname, typ, key, env, value) in self.iterAllOptions():
            var = get_env_by_type(typ, env)
            if var is not None:
                # BUGFIX: store the value read from the environment (`var`),
                # not the stale `value` yielded by iterAllOptions — otherwise
                # `var` was computed, tested and then thrown away.
                setattr(self, attrname, var)

    def _writeSettingsToConfigurationFile(self):
        conf_file_path = os.path.join(xdg_config_home(), "pitivi.conf")

        for (section, attrname, typ, key, env_var, value) in self.iterAllOptions():
            if not self._config.has_section(section):
                self._config.add_section(section)
            if key:
                if value is not None:
                    self._config.set(section, key, str(value))
                else:
                    self._config.remove_option(section, key)
        try:
            # Renamed from `file` to avoid shadowing the Python 2 builtin.
            conf_file = open(conf_file_path, 'w')
        except (IOError, OSError):
            # BUGFIX: "except IOError, OSError" caught only IOError and
            # rebound the name OSError to the exception; the tuple form
            # actually catches both exception types.
            return
        self._config.write(conf_file)
        conf_file.close()
Пример #59
0
class ParserManager(object):
    """ parser manager
    """
    def __init__(self, config=None):
        """Create the parser manager and pre-parse all rule files.

        Arguments:
            config -- dict holding 'ADC_FILE_PATH', 'SDF_FILE_PATH' and
                      'PCC_FILE_PATH' entries pointing at the rule files.
        """
        # BUGFIX: `config={}` was a mutable default argument shared between
        # every instance constructed without a config; default to None and
        # create a fresh dict per instance instead.
        self.config = {} if config is None else config
        self.parser = None
        # Rules are parsed once, up front, and cached for the search methods.
        self.ADC_RULES = self.parse_adc()
        self.SDF_RULES = self.parse_sdf()
        self.PCC_RULES = self.parse_pcc()
        self.debugging_flag = False

    def parse_adc(self):
        """Parse the ADC rule configuration file.

        Returns a list of rule dictionaries (one per ADC_RULE section), or
        an empty dict when no ADC file path is configured.
        """
        if not self.config.get('ADC_FILE_PATH'):
            print "Input error"
            return {}
        self.parser = SafeConfigParser()
        self.parser.read(self.config.get('ADC_FILE_PATH'))
        rules = []
        for section in self.parser.sections():
            # Only ADC_RULE sections are rules; skip everything else.
            if "ADC_RULE" not in section:
                continue
            adc_type = 0
            app_ip = "0.0.0.0"
            prefix = 0
            for option, raw in self.parser.items(section):
                if "adc_type" in option:
                    adc_type = raw
                if "ip" in option or "domain" in option:
                    app_ip = raw
                if "prefix" in option:
                    prefix = raw
            # A non-zero prefix turns the address into CIDR notation.
            if prefix:
                app_ip += "/" + str(prefix)
            rules.append({
                'RuleID': section,
                'Ue_Ip': "0.0.0.0",
                'App_Ip': app_ip,
                'Src_Port': "0:65535",
                'Dest_Port': "0:65535",
                'Priority': "0x1/0x0",
                'Adc_Type': adc_type
            })
        return rules

    def parse_sdf(self):
        """Parse the SDF rule configuration file.

        Returns a list of rule dictionaries; bidirectional sections produce
        two entries (the original plus a mirrored local/remote copy).
        """
        self.parser = SafeConfigParser()
        self.parser.read(self.config.get('SDF_FILE_PATH'))
        PCC_RULE_ID = 0
        sdf_rules = []
        for val in self.parser.sections():
            # Per-section defaults; options present below override them.
            DIRECTION = 'bidirectional'
            LOCAL_IP = '0.0.0.0'
            LOCAL_IP_MASK = 0
            IPV4_REMOTE = '0.0.0.0'
            IPV4_REMOTE_MASK = 0
            LOCAL_LOW_LIMIT_PORT = 0
            LOCAL_HIGH_LIMIT_PORT = 65535
            REMOTE_LOW_LIMIT_PORT = 0
            REMOTE_HIGH_LIMIT_PORT = 65535
            PROTOCOL = hex(0)
            PROTOCOL_MASK = hex(0)

            if val != 'GLOBAL':
                PCC_RULE_ID += 1
                # Every rule after the first defaults to a full protocol mask.
                if PCC_RULE_ID > 1:
                    PROTOCOL_MASK = '0xff'
                if self.parser.has_option(val, 'DIRECTION'):
                    DIRECTION = str(self.parser.get(val, 'DIRECTION'))

                if self.parser.has_option(val, 'IPV4_LOCAL'):
                    LOCAL_IP = str(self.parser.get(val, 'IPV4_LOCAL'))

                if self.parser.has_option(val, 'IPV4_LOCAL_MASK'):
                    LOCAL_IP_MASK = str(self.parser.get(
                        val, 'IPV4_LOCAL_MASK'))

                if self.parser.has_option(val, 'IPV4_REMOTE'):
                    IPV4_REMOTE = str(self.parser.get(val, 'IPV4_REMOTE'))

                if self.parser.has_option(val, 'IPV4_REMOTE_MASK'):
                    IPV4_REMOTE_MASK = self.parser.get(val, 'IPV4_REMOTE_MASK')

                if self.parser.has_option(val, 'PROTOCOL'):
                    PROTOCOL = hex(int(self.parser.get(val, 'PROTOCOL')))

                if self.parser.has_option(val, 'PROTOCOL_MASK'):
                    PROTOCOL_MASK = int(self.parser.get(val, 'PROTOCOL_MASK'))

                if self.parser.has_option(val, 'LOCAL_LOW_LIMIT_PORT'):
                    LOCAL_LOW_LIMIT_PORT = int(
                        self.parser.get(val, 'LOCAL_LOW_LIMIT_PORT'))

                if self.parser.has_option(val, 'LOCAL_HIGH_LIMIT_PORT'):
                    LOCAL_HIGH_LIMIT_PORT = int(
                        self.parser.get(val, 'LOCAL_HIGH_LIMIT_PORT'))

                if self.parser.has_option(val, 'REMOTE_LOW_LIMIT_PORT'):
                    REMOTE_LOW_LIMIT_PORT = int(
                        self.parser.get(val, 'REMOTE_LOW_LIMIT_PORT'))

                if self.parser.has_option(val, 'REMOTE_HIGH_LIMIT_PORT'):
                    REMOTE_HIGH_LIMIT_PORT = int(
                        self.parser.get(val, 'REMOTE_HIGH_LIMIT_PORT'))
                # Normalize a full dotted-quad mask to its prefix length.
                if LOCAL_IP_MASK == '255.255.255.255':
                    LOCAL_IP_MASK = 32

                if IPV4_REMOTE_MASK == '255.255.255.255':
                    IPV4_REMOTE_MASK = 32

                # BUGFIX: the original dict literal listed 'Local_High_Port'
                # twice (a silently-ignored duplicate key); each key now
                # appears exactly once with the same resulting values.
                sdf_rules.append({
                    'RuleID': val,
                    'Direction': DIRECTION,
                    'Local_IP': LOCAL_IP + "/" + str(LOCAL_IP_MASK),
                    'Local_Low_Port': LOCAL_LOW_LIMIT_PORT,
                    'Local_High_Port': LOCAL_HIGH_LIMIT_PORT,
                    'Remote_IP': IPV4_REMOTE + "/" + str(IPV4_REMOTE_MASK),
                    'Remote_Low_Port': REMOTE_LOW_LIMIT_PORT,
                    'Remote_High_Port': REMOTE_HIGH_LIMIT_PORT,
                    'Protocol': PROTOCOL,
                    'Protocol_Mask': PROTOCOL_MASK
                })

                if DIRECTION == 'bidirectional':
                    # Mirror the rule with local/remote endpoints swapped so a
                    # bidirectional section matches traffic in both directions.
                    sdf_rules.append({
                        'RuleID': val,
                        'Direction': DIRECTION,
                        'Local_IP': IPV4_REMOTE + "/" + str(IPV4_REMOTE_MASK),
                        'Local_Low_Port': LOCAL_LOW_LIMIT_PORT,
                        'Local_High_Port': LOCAL_HIGH_LIMIT_PORT,
                        'Remote_IP': LOCAL_IP + "/" + str(LOCAL_IP_MASK),
                        'Remote_Low_Port': REMOTE_LOW_LIMIT_PORT,
                        'Remote_High_Port': REMOTE_HIGH_LIMIT_PORT,
                        'Protocol': PROTOCOL,
                        'Protocol_Mask': PROTOCOL_MASK
                    })
        return sdf_rules

    def parse_pcc(self):
        """Parse the PCC rule configuration file.

        Returns a list of rule dictionaries (one per PCC_FILTER section).
        """
        self.parser = SafeConfigParser()
        self.parser.read(self.config.get('PCC_FILE_PATH'))
        PRECEDENCE = 0
        Name = ""
        # BUGFIX: action1 was read when building the rule dict but never
        # initialized, so the first PCC_FILTER section lacking a GATE_STATUS
        # option raised NameError.  Initialized once (like Name/PRECEDENCE),
        # so later sections still inherit the last value seen — presumably
        # intentional carry-over; confirm against the config format.
        action1 = ""
        pcc_rules = []
        for val in self.parser.sections():
            if "PCC_FILTER" in val:
                SDF_FILTER_IDX = None
                ADC_FILTER_IDX = None
                if self.parser.has_option(val, 'SDF_FILTER_IDX'):
                    SDF_FILTER_IDX = str(self.parser.get(
                        val, 'SDF_FILTER_IDX'))
                    # Comma-separated list -> stripped, non-empty id strings.
                    SDF_FILTER_IDX = [
                        id.strip() for id in SDF_FILTER_IDX.split(",") if id
                    ]

                if self.parser.has_option(val, 'ADC_FILTER_IDX'):
                    ADC_FILTER_IDX = str(self.parser.get(
                        val, 'ADC_FILTER_IDX'))

                if self.parser.has_option(val, 'PRECEDENCE'):
                    PRECEDENCE = str(self.parser.get(val, 'PRECEDENCE'))

                if self.parser.has_option(val, 'Rule_Name'):
                    Name = str(self.parser.get(val, 'Rule_Name'))

                if self.parser.has_option(val, 'GATE_STATUS'):
                    action1 = str(self.parser.get(val, 'GATE_STATUS'))
                pcc_rules.append({
                    'RuleID': val,
                    'Sdf_ID': SDF_FILTER_IDX,
                    'Adc_ID': ADC_FILTER_IDX,
                    'Precedence': PRECEDENCE,
                    'Name': Name,
                    'Action': action1
                })
        return pcc_rules

    def search_adc(self, direction, ue_ip, app_ip=None):
        """
        Description : Function to search rule in adc config file
        Arguments : direction - packet direction (uplink or downlink)
                    ue_ip - user equipment ip
                    app_ip - application ip
        Result : Matched pcc rule
        """
        if self.debugging_flag:
            print "-------- Searching in ADC -----------"
        filter_adc_rule = []
        for conf in self.ADC_RULES:
            # Check exact ip match
            if conf['Adc_Type'] == '1':
                if app_ip == conf['App_Ip']:
                    filter_adc_rule.append(conf)
            # Check in range
            if conf['Adc_Type'] == '2':
                if Utility.check_ip_range(app_ip, conf['App_Ip']):
                    filter_adc_rule.append(conf)
            # Check for domain
            if conf['Adc_Type'] == '0':
                if app_ip == conf['App_Ip']:
                    filter_adc_rule.append(conf)

        if self.debugging_flag:
            pprint.pprint(filter_adc_rule)

        priority = -1
        filter_pcc = None
        for filter_adc in filter_adc_rule:
            # Get the rule number and search it in pcc rule
            rule_id = filter_adc['RuleID'].split("ADC_RULE_")[1]
            for conf in self.PCC_RULES:
                # check the highest precedence and assign to filter_pcc from pcc
                # rules
                if rule_id == conf['Adc_ID']:
                    if priority == -1:
                        priority = int(conf['Precedence'])
                        filter_pcc = conf
                    else:
                        if priority > int(conf['Precedence']):
                            priority = int(conf['Precedence'])
                            filter_pcc = conf

        if filter_pcc:
            if self.debugging_flag:
                print "Found rule in PCC configration"
            filter_pcc['Filter_Type'] = "ADC"
            if self.debugging_flag:
                pprint.pprint(filter_pcc)
            return filter_pcc
        return {}

    def search_sdf(self, direction, ue_ip=None, app_ip=None):
        """
        Description : Function to search rule in sdf config file
        Arguments : direction - packet direction ('UL' uplink / 'DL' downlink)
                    ue_ip - user equipment ip
                    app_ip - application ip
        Result : Matched pcc rule dict (mutated with Filter_Type='SDF'),
                 or None when nothing matched
        """
        if self.debugging_flag:
            print "-------- Searching in SDF -----------"
        result = []
        # `tmp` tracks RuleIDs already collected so each rule appears once.
        tmp = []
        # Verify SDF rule of Uplink direction
        if direction == 'UL':
            for conf in self.SDF_RULES:
                if conf['Direction'] == "uplink_only":
                    if Utility.check_ip_range(ue_ip, conf['Local_IP']):
                        if app_ip:
                            if Utility.check_ip_range(app_ip,
                                                      conf['Remote_IP']):
                                if not conf['RuleID'] in tmp:
                                    tmp.append(conf['RuleID'])
                                    result.append(conf)
        # Verify SDF rule of downling direction
        if direction == "DL":
            # For downlink the roles are reversed: the UE is the remote end.
            ue_ip, app_ip = app_ip, ue_ip
            for conf in self.SDF_RULES:
                if conf['Direction'] == "downlink_only":
                    if Utility.check_ip_range(ue_ip, conf['Remote_IP']):
                        if app_ip:
                            if Utility.check_ip_range(app_ip,
                                                      conf['Local_IP']):
                                if not conf['RuleID'] in tmp:
                                    tmp.append(conf['RuleID'])
                                    result.append(conf)
        if self.debugging_flag:
            pprint.pprint(result)
        # Map matched SDF rules to PCC rules via their numeric suffix.
        # NOTE(review): the flag loop accepts a PCC rule only when EVERY id in
        # its Sdf_ID list equals rule_id — presumably "any id matches" was
        # intended for multi-id lists; confirm before changing.
        filter_pcc = []
        for conf in self.PCC_RULES:
            for res in result:
                rule_id = res['RuleID'].split("SDF_FILTER_")[1]
                flag = True
                if conf['Sdf_ID']:
                    for tmp_rule_id in conf['Sdf_ID']:
                        if rule_id != tmp_rule_id:
                            flag = False
                    if flag:
                        filter_pcc.append(conf)
        # Check the highest precedence
        # (lowest numeric Precedence wins; ties keep the first candidate)
        precedence = -1
        res = None
        if filter_pcc:
            if self.debugging_flag:
                print "Found rule in SDF configration"
            for pcc in filter_pcc:
                if precedence == -1 or precedence > int(pcc['Precedence']):
                    res = pcc
                    precedence = int(pcc['Precedence'])
            # NOTE: this mutates the cached PCC rule dict in place.
            res['Filter_Type'] = "SDF"
        if self.debugging_flag:
            pprint.pprint(res)
        return res

    def search_filter(self, direction, ue_ip, app_ip=None):
        """
        Description : Function to calculate actual matched rule with the basis
                      of adc and sdf rule precedence
        Arguments : direction - packet direction (uplink or downlink)
                    ue_ip - user equipment ip
                    app_ip - application ip
        Result : Matched rule
        """
        adc_filter = None
        sdf_filter = None
        # Both lookups are best-effort: a failure in one must not prevent
        # the other from being tried, so exceptions are printed and ignored.
        try:
            adc_filter = self.search_adc(direction, ue_ip, app_ip)
        except Exception, err:
            print str(err)

        try:
            sdf_filter = self.search_sdf(direction, ue_ip, app_ip)
        except Exception, err:
            print str(err)
        # NOTE(review): no value is returned after computing adc_filter and
        # sdf_filter — the precedence comparison presumably follows in the
        # full source; this copy appears truncated. Confirm upstream.
Пример #60
0
class Subvolume_Agent(object):
    def __init__(self, num, cfg_file="test_config.cfg"):
        self.num = num
        self.active_fronts = []
        print_with_rank(num, "Online!")

        # static: boundaries
        self.static_constellation = {}

        self.virtual_constellation = {}
        self.neighbor_constellation = {}
        self.distal_constellation = {}
        self.dynamic_constellation = {}

        # substance definitions: both static and dynamic (for now, only dynamic supported)
        self.substances_constellation = {}

        self.parser = SafeConfigParser()
        self.parser.read(cfg_file)
        if self.parser.has_option("system", "recursion_limit"):
            sys.setrecursionlimit(
                self.parser.getint("system", "recursion_limit"))
        """ check the minimum distance between fronts of each entity_type.
            Used to test the validity of front. If not set, the default
            of current_diameter / 2 is used. Otherwise as specified.
            Setting this value in the cfg-file comes handy when using
            a very small increment to achieve "detailed" avoidance/attraction.

            Check once when starting the Subvolume agent.
        """
        allowed_self_dist = {}
        for section in self.parser.sections():
            if section.startswith("cell_type_"):
                if self.parser.has_option(section, "minimum_self_distance"):
                    allowed_self_dist[section] = self.parser.getfloat(\
                                                      section,\
                                                      "minimum_self_distance")
                    print "allowed_s_d[%s]=%f" % (section,
                                                  allowed_self_dist[section])
        self.allowed_self_dist = allowed_self_dist

        if self.parser.has_option("system", "recursion_limit"):
            sys.setrecursionlimit(
                self.parser.getint("system", "recursion_limit"))

        # ready. set. go!
        self._initialize_communication_links()
        self.main_loop()

    def _initialize_communication_links(self):
        """Create the ZMQ sockets: proxy pub/sub pair plus the admin push link."""
        self.context = zmq.Context()

        # to communicate through the proxy
        self.psub = self.context.socket(zmq.SUB)
        self.psub.connect("tcp://localhost:%s" %
                          self.parser.getint("system", "proxy_pub_port"))
        # Subscribe to broadcasts and to messages addressed to this SV's id.
        self.psub.setsockopt(zmq.SUBSCRIBE, "All")
        self.psub.setsockopt(zmq.SUBSCRIBE, "%06d" % self.num)
        self.ppub = self.context.socket(zmq.PUB)
        self.ppub.connect("tcp://localhost:%s" %
                          self.parser.getint("system", "proxy_sub_port"))

        # register/sync with Admin
        self.socket_push = self.context.socket(zmq.PUSH)
        self.socket_push.connect("tcp://127.0.0.1:%s" %
                                 self.parser.getint("system", "pull_port"))
        self.socket_push.send("SV_%06d online" % self.num)

    def main_loop(self):
        """Receive and dispatch proxy messages until a "Done" message arrives.

        Messages are pickled; plain strings are handled first, tuples are
        dispatched on their first element (the message tag).
        """
        running = True
        while running:
            #[address,message] = self.socket_sub.recv_multipart()
            #print_with_rank(self.num, "Waiting for a message to arrive")
            [address, message] = self.psub.recv_multipart()
            message = pickle.loads(message)
            #print_with_rank(self.num,"addr: "+str(address)+", message >"+str(message)+"<")
            if message == "Done":
                running = False
                print_with_rank(self.num, "going home")
            elif message == "Update":
                # NOTE(review): a bare "Update" string is only logged; the
                # tuple-tagged ("Update", ...) branch below does the actual
                # work — confirm both message forms are really sent.
                print_with_rank(self.num, " update")
            elif message[0] == "Init_SV":
                #print_with_rank(self.num,"Init_SV received")
                self._process_init_sv(message)
            elif message[0] == "Initialize_GEs":
                self._process_initialize_ges(message)
            elif message[0] == "Update":
                self._process_update(message)
            elif message[0] == "Reply_constellation":
                self._process_reply_constellation(message)
            elif message[0] == "Request_constellation":
                self._process_request_constellation(message)
            elif message[0] == "Add_Front":
                self._add_front(message)

    def _process_update(self, message):
        """Handle an admin ("Update", cycle, summaries) message.

        Stores the new cycle number, rebuilds the distal constellation from
        the summaries of non-neighboring SVs, then either updates directly
        (no neighbors) or first requests the neighbors' constellations.
        """
        print_with_rank(self.num,
                        " received Update from admin (cycle: %s)" % message[1])

        self.update_cycle = int(message[1])
        all_summarized_constellations = message[2]

        # clear the distal_constellation & neighbor_constellation from the previous update cycle
        self.distal_constellation = {}
        self.neighbor_constellation = {}

        # Debug-only bookkeeping: report the size of SV 1's summary if present.
        try:
            total_l = sum([
                len(v)
                for k, v in all_summarized_constellations[1].iteritems()
            ])
            no_keys = len(all_summarized_constellations[1].keys())

            print_with_rank(self.num, "L(all_summarized_constellations): %i, no_keys: %i" % \
                           (total_l,no_keys))
            # print_with_rank(self.num, "all_sum_con[1].keys: " + str(all_summarized_constellations[1].keys()))
            # print_with_rank(self.num, "all_sum_con[2].keys: " + str(all_summarized_constellations[2].keys()))
        except Exception:
            pass

        # 2014-08-06
        """
        Special MERGE needed for distal:
        Onlt use those keys that are NOT in DYNAMIC. (because this information is close)
        Or, make the summary so to only include dynamic, and not distal
        """
        # Merge in only summaries from SVs that are neither self nor neighbors:
        # neighbor data arrives later via Request/Reply_constellation.
        for num in all_summarized_constellations:
            if not num == self.num:
                if not num in self.neighbors:
                    self.distal_constellation = \
                        self._merge_constellations(self.distal_constellation,\
                                                   all_summarized_constellations[num])
                    #print "all_s_constel: ", all_summariz_constellations[num]

        # print "distal_constel: ", self.distal_constellation

        # deal with the special case of having only one SV
        if len(self.neighbors) == 0:
            self._perform_update()
        else:
            # _perform_update fires once all neighbor replies are counted in.
            self._ask_neighboring_constellations()
            self._constellation_responses = 0

    def _ask_neighboring_constellations(self):
        """Send a constellation request to every neighboring subvolume."""
        # The request payload is identical for all neighbors, so pickle once.
        payload = pickle.dumps(("Request_constellation", self.num))
        for neighbor in self.neighbors:
            self.ppub.send_multipart(["%06d" % neighbor, payload])

    def _process_request_constellation(self, message):
        """Answer a neighbor's request with this SV's dynamic constellation.

        Only the dynamic constellation is transmitted: the static environment
        is known to every SV and need not be re-sent. What counts as "near"
        vs "distal" is decided by the developing front itself ("in the eye of
        the beholder"), so sending more than the dynamic data is unnecessary.
        """
        requester = message[1]
        reply = ("Reply_constellation", self.dynamic_constellation)
        self.ppub.send_multipart(
            ["%06d" % requester, pickle.dumps(reply)])

    def _process_reply_constellation(self, message):
        """Fold a neighbor's constellation reply into ours; update when complete."""
        received = message[1]

        # 2014-08-06: accumulate every neighbor's data into one constellation.
        self.neighbor_constellation = self._merge_constellations(
            self.neighbor_constellation, received)

        self._constellation_responses += 1

        # Once the last expected neighbor has replied, run the update step.
        if self._constellation_responses == len(self.neighbors):
            self._perform_update()

    def _merge_constellations(self, d1, d2):
        """Merge constellation d2 into d1 (in place) and return d1.

        Values are sets of hashable Front objects, so union() collapses
        duplicates automatically. Keys present only in d2 end up aliasing
        d2's set objects rather than copies.
        """
        for key, fronts in d2.items():
            if key in d1:
                d1[key] = d1[key].union(fronts)
            else:
                d1[key] = fronts
        return d1

    def _process_init_sv(self, message):
        """Handle the admin's Init_SV message.

        message layout: (tag, boundary, neighbors, static_constellation,
        virtual_constellation). Seeds the RNG and stores the received state.
        """
        # initialize the seed, make the seed unqiue by adding the SV ID
        np.random.seed(self.parser.getint("system", "seed") + self.num)

        #print_with_rank(self.num,"processing init message: " + str(message) )
        print_with_rank(self.num,
                        "got virtual w/ keys: " + str(message[4].keys()))
        #time.sleep(2)
        boundary = message[1]
        neighbors = message[2]
        static_constellation = message[3]
        virtual_constellation = message[4]
        self.boundary = boundary
        self.neighbors = neighbors

        # 2014-08-06
        self.static_constellation = static_constellation
        self.virtual_constellation = virtual_constellation

        print_with_rank(self.num,
                        "INIT_SV, static: " + str(static_constellation.keys()))
        print_with_rank(
            self.num,
            "INIT_SV, static: " + str(self.static_constellation.keys()))

    def _process_initialize_ges(self, message):
        """Create the initial (soma) fronts described in an Initialize_GEs message.

        message[1] is a list of dicts with 'algorithm', 'entity_name',
        'radius' and 'soma_xyz' keys; each becomes a new active Front.
        """
        print_with_rank(self.num, "message: " + str(message))
        entries = message[1]
        for details in entries:
            algo_name = details["algorithm"]
            entity_name = details["entity_name"]
            radius = details["radius"]
            soma_xyz = details["soma_xyz"]
            new_front = Front(entity_name,algo_name,soma_xyz,radius,\
                              0,0) # 0: pathlength, 0: order
            new_front.soma_pos = soma_xyz

            # self.fronts.append(new_front)

            # # 2014-02-19: in principle the "all_contained" can be deleted once
            # # I include the diameters in all constellations
            # self.all_contained_entities[entity_name] = []
            # self.all_contained_entities[entity_name].append(new_front)

            # 2014-08-06
            # Each entity name must be unique; a clash here means the admin
            # sent the same entity twice.
            if entity_name in self.dynamic_constellation:
                print "Help! I am overwriting something"
                time.sleep(10)
            self.dynamic_constellation[entity_name] = set()
            self.dynamic_constellation[entity_name].add(
                new_front)  #append((soma_xyz,radius))
            self.active_fronts.append(new_front)

    def _get_pos_only_constellation(self, c):
        """Reduce a constellation to bare coordinates: {key: [front.xyz, ...]}."""
        return {key: [front.xyz for front in fronts]
                for key, fronts in c.items()}

    def _perform_update(self, debug_mem=False):
        # real deal
        #merged_constellation = copy.deepcopy(self.static_constellation)
        merged_constellation = copy.copy(self.static_constellation)
        merged_constellation = self._merge_constellations(
            merged_constellation, self.dynamic_constellation)
        merged_constellation = self._merge_constellations(
            merged_constellation, self.neighbor_constellation)
        merged_constellation = self._merge_constellations(
            merged_constellation, self.distal_constellation)
        merged_constellation = self._merge_constellations(
            merged_constellation, self.substances_constellation)

        pos_only_constellation = self._get_pos_only_constellation(
            merged_constellation)

        new_fronts = []
        # randomize order
        np.random.shuffle(self.active_fronts)
        changes = []
        all_synapse_locs = []
        for i, front in zip(range(len(self.active_fronts)),
                            self.active_fronts):
            print_with_rank(self.num, "i= " + str(i) + ": " + str(front))

            c_seed = 0  #np.random.randint(0,1000) #+self.num*(self.update_cycle + i)

            front.update_cycle = self.update_cycle
            ret = front.extend_front(c_seed, pos_only_constellation,
                                     self.virtual_constellation)
            if isinstance(ret, tuple):
                # front is trying to update the environment
                # store the update information, likely in my_constellation?
                update_info = ret[1]
                # print "update_info: ", update_info
                entity_name = update_info.keys()[0]
                entity_front = update_info[entity_name]  # 2014-08-11

                # # 2014-02-19
                # 2014-08-11
                # store and...
                # ... update the current pos_only_constellation
                if entity_name in self.substances_constellation:
                    self.substances_constellation[entity_name].append(
                        entity_front)
                    pos_only_constellation[entity_name].add(entity_front.xyz)
                else:
                    self.substances_constellation[entity_name] = set()
                    self.substances_constellation[entity_name].add(
                        entity_front)
                    pos_only_constellation[entity_name] = []
                    pos_only_constellation[entity_name].append(
                        entity_front.xyz)

                # set the ret for subsequent processing (without updating my code)
                ret = ret[0]
                #print_with_rank(self.num, "SECRETION NOT YET IMPLEMENTED")
                #time.sleep(100)

            elif isinstance(ret, list) or ret == None:
                pass  # front is only extending
            else:
                print "ret: ", ret
                print "type: ", type(ret)
                print "extend_front must return either list \
                (for continuation, branch or termination or tuple (list and dict)"

                sys.exit(0)
            if ret == None:
                # that's the end of this front
                pass
            else:
                for f in ret:
                    if self._within_this_volume(f.xyz):
                        """TODO: PERFORM CHECK: can this front be added at this location
                        If yes: ok
                        If not: wiggle a few times and check each time, if too difficut: discard front
                        """
                        valid, syn_locs = self._valid_and_wiggle(f)

                        if valid:
                            new_fronts.append(f)
                            all_synapse_locs.extend(syn_locs)
                            # self.my_constellation[f.entity_name].append(f.xyz)

                            self.dynamic_constellation[f.entity_name].add(f)
                            pos_only_constellation[f.entity_name].append(f.xyz)
                        else:
                            print "NOT VALID TO ADD THIS POINT"
                            pass
                    else:
                        # print_with_rank(self.num,"front(%s) not in this SV (%s)" % (str(f.xyz),str(self.boundary)))
                        # make message and send to admin, admin then distributes to correct subvolume
                        message = ("Migrate_Front", f, front
                                   )  # send new front and parent
                        self.ppub.send_multipart(
                            ["Admin", pickle.dumps(message)])
                # self._temp_to_db(front,ret)
                changes.append((front, ret))
        self.active_fronts = new_fronts

        #self.socket_push.send(pickle.dumps(("Update_OK",self.my_constellation)))
        """should I only send a summary of dynamic and substances?
        distal and neighbors are not interesting to send and static is already known from the start to all others
        so yes,
        """
        core_constellation = {}
        core_constellation = self._merge_constellations(
            core_constellation, self.dynamic_constellation)
        core_constellation = self._merge_constellations(
            core_constellation, self.substances_constellation)
        core_pos_only_constellation = self._get_pos_only_constellation(
            core_constellation)

        # 2014-08-12
        # my_summarized_constellation = self._summarize_constellation(pos_only_constellation)
        my_summarized_constellation = self._summarize_constellation(
            core_pos_only_constellation)

        msg = ("Update_OK", "%06d" % self.num, changes, all_synapse_locs,
               my_summarized_constellation)
        self.ppub.send_multipart(["Admin", pickle.dumps(msg)])

        if debug_mem:
            self._gather_constellation_size(merged_constellation)

    def _valid_and_wiggle(self, f):
        """Check whether front *f* is valid here; if not, jitter and retry.

        The front is first validated at its current position. When invalid,
        its position is perturbed by uniform noise in [-radius, radius) per
        coordinate and re-checked, up to the configured number of avoidance
        attempts ([system] avoidance_attempts in the config, default 0).

        Parameters
        ----------
        f : front-like object with ``xyz`` (array-like position) and
            ``radius`` attributes. ``f.xyz`` is rebound on every wiggle, so
            the jitter accumulates across attempts.

        Returns
        -------
        (valid, syn_locs) : bool, list
            Validity flag and the synapse locations reported by the last
            ``_is_front_valid`` call.
        """
        # Loop-invariant: whether a synapse DB is configured does not change
        # while we wiggle, so look it up once instead of per attempt.
        check_synapses = self.parser.has_option("system", "syn_db")
        valid, syn_locs = self._is_front_valid(f,
                                               check_synapses=check_synapses)

        avoidance_attempts = 0
        if self.parser.has_option("system", "avoidance_attempts"):
            avoidance_attempts = self.parser.getint("system",
                                                    "avoidance_attempts")

        attempts = 0
        while not valid and attempts < avoidance_attempts:
            # Uniform jitter in [-radius, radius) on each coordinate.
            noise = (2 * f.radius) * np.random.random(len(f.xyz)) - f.radius
            f.xyz = f.xyz + noise
            valid, syn_locs = self._is_front_valid(
                f, check_synapses=check_synapses)
            attempts += 1
        return valid, syn_locs

    def _gather_constellation_size(self, merged_constellation):
        """
        # real deal
        merged_constellation = self.static_constellation
        merged_constellation = self._merge_constellations(merged_constellation,self.dynamic_constellation)
        merged_constellation = self._merge_constellations(merged_constellation,self.neighbor_constellation)
        merged_constellation = self._merge_constellations(merged_constellation,self.distal_constellation)
        merged_constellation = self._merge_constellations(merged_constellation,self.substances_constellation)
        
        pos_only_constellation = self._get_pos_only_constellation(merged_constellation)        
        """
        try:
            mc_total_l = sum(
                [len(v) for k, v in merged_constellation.iteritems()])
            mc_no_keys = len(merged_constellation.keys())

            stc_total_l = sum(
                [len(v) for k, v in self.static_constellation.iteritems()])
            stc_no_keys = len(self.static_constellation.keys())

            dyc_total_l = sum(
                [len(v) for k, v in self.dynamic_constellation.iteritems()])
            dyc_no_keys = len(self.dynamic_constellation.keys())

            nc_total_l = sum(
                [len(v) for k, v in self.neighbor_constellation.iteritems()])
            nc_no_keys = len(self.neighbor_constellation.keys())

            dc_total_l = sum(
                [len(v) for k, v in self.distal_constellation.iteritems()])
            dc_no_keys = len(self.distal_constellation.keys())

            sc_total_l = sum(
                [len(v) for k, v in self.substances_constellation.iteritems()])
            sc_no_keys = len(self.substances_constellation.keys())

            ss = "\nDEBUG: STC K=: %i, L: %i \n" % (stc_no_keys, stc_total_l)
            ss += "DEBUG: DYC K=: %i, L: %i \n" % (dyc_no_keys, dyc_total_l)
            ss += "DEBUG: NC K=: %i, L: %i \n" % (nc_no_keys, nc_total_l)
            ss += "DEBUG: DC K=: %i, L: %i \n" % (dc_no_keys, dc_total_l)
            ss += "DEBUG: SUC K=: %i, L: %i \n" % (sc_no_keys, sc_total_l)
            ss += "DEBUG: TOTAL K=: %i, L: %i \n" % (mc_no_keys, mc_total_l)
            print_with_rank(self.num, ss)
            # print_with_rank(self.num, "DEBUG: STC K=: %i, L: %i" % \
            #                (stc_no_keys,stc_total_l))
            # print_with_rank(self.num, "DEBUG: DYC K=: %i, L: %i" % \
            #                (dyc_no_keys,dyc_total_l))
            # print_with_rank(self.num, "DEBUG: NC K=: %i, L: %i" % \
            #                (nc_no_keys,nc_total_l))
            # print_with_rank(self.num, "DEBUG: DC K=: %i, L: %i" % \
            #                (dc_no_keys,dc_total_l))
            # print_with_rank(self.num, "DEBUG: SC K=: %i, L: %i" % \
            #                (sc_no_keys,sc_total_l))

            # print_with_rank(self.num, "DEBUG: TOTAL MC K=: %i, L: %i" % \
            #                (mc_no_keys,mc_total_l))
            print_with_rank(
                self.num,
                "static keys: " + str(self.static_constellation.keys()))
        except Exception, e:
            print "CANNOT DETERMINE CONSTELLATION SIZE"
            print e
            time.sleep(20)