Example #1
class Parse_MitmConfig():

    def __init__(self, configFile):
        self.config = ConfigParser()
        self.configFile = configFile
        self.trigger_parsing()

    '''
    trigger_parsing is a separate method so that the configuration file can be
    re-read if needed. This feature might be implemented in the future.
    '''

    def trigger_parsing(self):

        # TODO Check if file exists
        with open(self.configFile) as fp:
            self.config.readfp(fp)

        self.dst_ip = self.config.get('Destination', 'ip')
        self.dst_mac = self.config.get('Destination', 'mac')
        self.dst_port = self.config.get('Destination', 'port')

        self.src_ip = self.config.get('Source', 'ip')
        self.src_mac = self.config.get('Source', 'mac')
        self.src_port = self.config.get('Source', 'port')

        self.bridge0_interface = self.config.get('Interfaces', 'bridge0')
        self.bridge1_interface = self.config.get('Interfaces', 'bridge1')
        self.mitm_interface = self.config.get('Interfaces', 'mitm')
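
For reference, a minimal configuration file that Parse_MitmConfig could read might look like the following (section and option names come from the get() calls above; the values are illustrative placeholders):

[Destination]
ip = 192.0.2.20
mac = 00:11:22:33:44:55
port = 8080

[Source]
ip = 192.0.2.10
mac = 66:77:88:99:aa:bb
port = 9090

[Interfaces]
bridge0 = eth0
bridge1 = eth1
mitm = eth2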
Example #2
 def parseConfigurationFile(self, configFile):
     # Initialize defaults
     defaultParams = {}
     
     # We have the option of collecting stats during burnin
     # TODO: RENAME THESE
     defaultParams["collect_burnin_stats_intvl"] = 100
     defaultParams["results_dir"] = "."
     defaultParams["burnin"] = 10000
     defaultParams["samples"] = 1000
     defaultParams["thin"] = 10
     defaultParams["restarts"] = 1
     
     # Create a config parser object and read in the file
     cfgParser = ConfigParser(defaultParams)
     cfgParser.read(configFile)
     
     self.params = {}
     self.params["results_dir"]  = cfgParser.get("io", "results_dir")
     self.params["results_file"] = cfgParser.get("io", "results_file")
     
     self.params["samples"]      = cfgParser.getint("MCMC", "samples")
     self.params["samples_thin"] = cfgParser.getint("MCMC", "thin")
     self.params["burnin"]       = cfgParser.getint("MCMC", "burnin")
     self.params["burnin_thin"]  = cfgParser.getint("output", "collect_burnin_stats_intvl")
     self.params["restarts"]     = cfgParser.getint("MCMC", "restarts")
     
     # Update N_samples and N_burnin
     self.params["N_samples"] = self.params["samples"] * self.params["restarts"]
     self.params["N_burnin"] =  np.ceil(self.params["burnin"] / self.params["burnin_thin"]) * self.params["restarts"]  
             
     # Basic error checking
     if not os.path.exists(self.params["results_dir"]):
         log.error("Results directory does not exist!")
         raise Exception("Invalid parameter: io.results_dir %s" % self.params["results_dir"])
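
A configuration file consistent with these reads might look like this (sections and options taken from the get*() calls; values are illustrative, and anything omitted falls back to defaultParams):

[io]
results_dir = ./results
results_file = results.pkl

[MCMC]
samples = 1000
burnin = 10000
thin = 10
restarts = 1

[output]
collect_burnin_stats_intvl = 100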
Example #3
class handlers:
    def __init__(self, root):
        self.root = root

        ## quit out of everything

    def Quit(self):
        print "quitting...\n"
        sys.exit()

    def Record(self):
        sys.stdout.write("Can't record yet")
        self.Quit()

    def SaveConfig(self):
        sys.stdout.write("TBD")
        self.Quit()

    def LoadIniData(self, FileName):
        self.cp = ConfigParser()
        try:
            with open(FileName, "r") as f:
                self.cp.readfp(f)
        except IOError:
            raise Exception, "NoFileError"
        return
Example #4
def mf_construct_model_extensions(baseModel, configFile):
    """
    Initialize a model by iterating through the config file
    """
    extensions = {}
    
    cfgParser = ConfigParser()
    cfgParser.read(configFile)
    
    # Iterate over each section. If it is one of the recognized types,
    # call the constructor for that section
    for section in cfgParser.sections():
        params = dict(cfgParser.items(section))
        model = None
        if section == "cluster_model":
            model = mfConstructClusterModel(baseModel, configFile, params)
        elif section == "location_model":
            model = mfConstructLocationModel(baseModel, configFile, params)
    
        # Add this model to the extensions dictionary
        if model is not None:
            if section not in extensions:
                extensions[section] = model
            else:
                # If there is already an extension with this section name, keep a
                # list so multiple cluster or location models can coexist
                existing = extensions[section]
                if not isinstance(existing, list):
                    existing = [existing]
                extensions[section] = existing + [model]
                
    return extensions
Example #5
    def __init__(self, configFile=None):
        """
        Initialize all values and setup
        :param configFile:
        """

        if configFile is None:
            configFile = u"rmq_settings.conf"

        Config = ConfigParser()
        Config.read(configFile)

        send = ConfigSectionMap(u"Send", Config)

        self.vhost = send[u'vhost']
        self.queue = send[u'queue']
        self.routing_key = send[u'routing_key']
        self.exchange = send[u'exchange']

        self.host = send[u'host']
        self.port = int(send[u'port'])

        username = send[u'username']
        password = send[u'password']
        credentials = pika.PlainCredentials(username, password)

        self.connection = pika.BlockingConnection(
            pika.ConnectionParameters(host=self.host, port=self.port,
                                      virtual_host=self.vhost,
                                      credentials=credentials)
        )

        self.channel = self.connection.channel()
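
An rmq_settings.conf matching the lookups above might look like this (illustrative values; ConfigSectionMap is assumed to be the usual helper that returns a section's options as a dict):

[Send]
vhost = /
queue = work_queue
routing_key = work
exchange =
host = localhost
port = 5672
username = guest
password = guest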
Example #6
    def build_packets(self, dp):
        #
        config_dir, working_set = ConfigParser.get_working_set(self.CONFIG_FILE)
        #
        for filename in working_set:
            #
            full_filename = config_dir + '/' + filename
            print "------------------------------------------------------------"
            print "processing file: %s" %  full_filename
            print "------------------------------------------------------------"
            #
            config = ConfigParser.get_config(full_filename)
            #
            mod = None  # guard against unrecognized config types
            for cfg_type in ConfigParser.get_config_type(config):
                #
                if cfg_type == "flow_mod":
                    mod_config = ConfigParser.get_flow_mod(config)
                    mod = Mods.create_flow_mod(dp, mod_config)
                    #
                elif cfg_type == "group_mod":
                    #
                    mod_config = ConfigParser.get_group_mod(config)
                    mod = Mods.create_group_mod(dp, mod_config)

            if mod is not None:
                print "mod len: %i" % sys.getsizeof(mod)
                dp.send_msg(mod)
                print "message sent"
Example #7
class PluginManager:
    def __init__(self, cfgFile):
        self.mainPlugin = None
        self.loaded = {}
        self.config = ConfigParser()
        self.configFile = cfgFile
        self.rehash()

    def getProcesses(self):
        """Returns a list of all plugins, as well as the main plugin"""
        return self.loaded.values() + [self.mainPlugin]

    def launchPlugin(self, name):
        """Launches a new plugin, returning the plugin if it succeeded"""
        p = Plugin(name, self.config.get(name,"exec"))
        print "Launched",name
        if p.isRunning():
            return p
        print "Failed."
        return None

    def rehash(self):
        """reread the config file"""
        self.config.read(self.configFile)
        
    def load(self, name):
        """Loads a plugin and stores it in the plugin table"""
        print "load:",name
        if name in self.loaded:
            print "Already loaded, reloading."
            self.unload(name)

        plugin = self.launchPlugin(name)

        if plugin:
            self.loaded[name] = plugin
        else:
            print "Failed to load"

    def unload(self, name):
        """Stops a plugin and removes it from the plugin table"""
        print "unload:",name
        if name not in self.loaded:
            print "Not loaded. Skipping."
            return
        self.loaded[name].stop()
        del self.loaded[name]
        print "Unloaded"
    
    def startMain(self):
        """Starts the main exec and returns whether it was a successful start"""
        self.mainPlugin = self.launchPlugin("main")
        return self.mainPlugin is not None

    def startPlugins(self):
        """Starts the (non main) plugins and stores them all in the plugin table"""
        for plugin in self.config.sections():
            if plugin != "main":
                self.load(plugin)
Example #8
def python3parser():
    '''
    Return a ConfigParser (Python 3)
    '''
    from configparser import ConfigParser
    cp3 = ConfigParser()
    cp3.optionxform = str
    return cp3
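
A quick sketch of why overriding optionxform matters: by default ConfigParser lower-cases option names, while the identity function preserves them. Assuming the python3parser() helper above:

from configparser import ConfigParser

cp = ConfigParser()
cp.read_string("[s]\nMixedCase = 1\n")
print(list(cp["s"]))   # ['mixedcase'] -- keys are lower-cased by default

cp3 = python3parser()  # optionxform = str, as defined above
cp3.read_string("[s]\nMixedCase = 1\n")
print(list(cp3["s"]))  # ['MixedCase'] -- original case preserved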
Example #9
def load_files(filenames, **kwds):
    '''
    Parse configuration files, return configuration data structure. 
    '''
    cfg = ConfigParser()
    cfg.read(filenames)
    d = dictify(cfg)
    return interpolate(d, **kwds)
Example #10
def send_text(sender, recipient, text_message, config=None):
    if not config:
        config = ConfigParser()
        config.read('config/development.cfg')

    client = TwilioRestClient(config.get('twilio', 'account_sid'),
                              config.get('twilio', 'auth_token'))
    sms = client.sms.messages.create(body=text_message,
                                      to=recipient,
                                      from_=sender)
Example #11
 def __init__ (self,filename="timba.conf",
               system_paths=_default_system_paths,
               user_path=_default_user_path):
   self.syscp = ConfigParser()
   system_paths = [ os.path.join(path,filename) for path in system_paths ]
   self.syscp.read(system_paths)
   self.usercp = ConfigParser()
   self._user_file = os.path.join(user_path,"."+filename)
   self.usercp.read([self._user_file])
Example #12
def read_configuration(section):
	
	cp = ConfigParser()
	cp.read(config_file)

	#options = {}
	#for i in cp.items('Parser'):
	#	options[i[0]] = i[1]
	return dict(cp.items(section))
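
A minimal, self-contained sketch of the dict(cp.items(section)) pattern used above, here in Python 3 style (config_file is a module-level global in the original; the content is inlined for illustration):

from configparser import ConfigParser

cp = ConfigParser()
cp.read_string("[Parser]\ninput = data.txt\nverbose = yes\n")
options = dict(cp.items("Parser"))
print(options)  # {'input': 'data.txt', 'verbose': 'yes'} -- all values are strings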
Example #13
def get_common_config():
    from os.path import expanduser

    ROOT_PATH = expanduser("~")
    CONTACT_FILE_LOC = os.path.join(ROOT_PATH, ".contacts.pkl")
    CONFIG_FILE_LOC = os.path.join(ROOT_PATH, ".config.ini")
    config = ConfigParser()
    config.read(CONFIG_FILE_LOC)
    return config
Example #14
def update_config(configfile=None, modulename=None):
    """Update the system configuration from given config file and/or module.

    'configfile' is a ConfigObj (INI-style) config file, 'modulename' a module
    path in dotted notation. The function looks for files with a ".cfg"
    extension if the given module name refers to a package directory or a file
    with the base name of the right-most part of the module path and a ".cfg"
    extension added.

    If both 'configfile' and 'modulename' are specified, the module is read
    first, followed by the config file. This means that the config file's
    options override the options in the module file.

    """

    defaults = config_defaults()

    configdata = ConfigObj(unrepr=True)
    if modulename:
        lastdot = modulename.rfind('.')
        firstdot = modulename.find('.')
        packagename = modulename[:lastdot]
        top_level_package = modulename[:firstdot]
        modname = modulename[lastdot+1:]
        modfile = resource_filename(packagename, modname + '.cfg')
        if not os.path.exists(modfile):
            modfile = resource_filename(packagename, modname)
        if os.path.isdir(modfile):
            configfiles = glob.glob(os.path.join(modfile, '*.cfg'))
        else:
            configfiles = [modfile]

        top_level_dir = os.path.normpath(resource_filename(top_level_package, ''))

        package_dir = os.path.normpath(resource_filename(packagename, ''))

        defaults.update(dict(top_level_dir=top_level_dir,
                             package_dir=package_dir))
        
        # Update Python logging config
        for pathname in configfiles:
            if 'app.cfg' in pathname:
                parser = ConfigParser()
                conf = parser.dict_from_files(pathname, vars=defaults)
            else:
                obj = ConfigObj(pathname, unrepr=True)
                obj.merge(dict(DEFAULT=defaults))
                conf = obj.dict()
            configdata.merge(conf)

    if configfile:
        obj = ConfigObj(configfile, unrepr=True)
        obj.merge(dict(DEFAULT=defaults))
        conf = obj.dict()
        configdata.merge(conf)
    update(configdata.dict())
Example #15
def load(section, option, archive=_ConfigFile):
  """
    Load variable
  """
  cfg = ConfigParser()
  try:
    cfg.readfp(file(archive))
  except Exception, e:
    sys.stderr.write("%s, %s\n" % (archive, e.strerror))
    return
Example #16
 def __init__(self):
     parser = ConfigParser()
     parser.read('config.ini')                              
     #sets the key bindings
     keys = parser.get('controls', 'keys')           
     keyslist = keys.split(',')
     
     for key in keyslist:
         self.accept(key, self.hitcountfunction)
         print "The key '"+key+"' has been added."
Example #17
def _tryLoadConfig():
    'load the config from file'
    c = ConfigParser()
    if c.read('gateway.ini'):
        IOT.GatewayId = c.get('general', 'gatewayId')
        IOT.ClientId = c.get('general', 'clientId')
        IOT.ClientKey = c.get('general', 'clientKey')
        return True
    else:
        return False
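
A gateway.ini that _tryLoadConfig would accept might look like this (illustrative values):

[general]
gatewayId = my-gateway-id
clientId = my-client-id
clientKey = my-client-key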
Example #18
def save(pom, APPPATH, CONFIG):
        """
        Method to save the changes (volume, song,...) in the configuration file
        (By default pomfig.cfg)

        @param pom The Pomodoro to set the options
        @type pom Pomodoro

        @param APPPATH The path to the application
        @type APPPATH String

        @param CONFIG The name of the config archive (*.cfg)
        @type CONFIG String
        """
        cfg = ConfigParser()

        cfg.read(APPPATH + '/' + CONFIG)

        cfg.set("configuration", "volume", pom.volume)
        if pom.filemp3 != "":
                cfg.set("configuration", "song", pom.filemp3)

        with open(APPPATH + '/' + CONFIG, "w") as f:
                cfg.write(f)
Example #19
 def save_config(self, prop_name, prop_value):
     # TODO make the operation a transaction.
     config = ConfigParser()
     if os.path.exists(self.config_file_path):
         config.read(self.config_file_path)
     # ensure the section exists before setting values
     if not config.has_section(self.azure_crypt_config_section):
         config.add_section(self.azure_crypt_config_section)
     config.set(self.azure_crypt_config_section, prop_name, prop_value)
     with open(self.config_file_path, "wb") as configfile:
         config.write(configfile)
Example #20
    def LoadLabels(self,NEW_LABS = None):
        '''
        Need to comment further. Ambiguous input.

        Capable of loading several different types of labels depending on
        what NEW_LABS contains.

        ARG: NEW_LABS     TYPE: None,string,list,np.ndarray    DESC: TODO
        '''
        if not self.DATA_LOADED:
            self.NewInput()
        #if called with the default: random labels are created
        #This is mostly for testing
        if NEW_LABS is None:
            if self.CONFIG_LOADED:
                self.LABELS = np.random.randint(0,self.NUM_CLASS, (np.size(self.TRNG_DATA,0),1))
                self.LABELS = np.transpose(np.sort(self.LABELS))
            
            else:
                self.LoadStackConfig()
                self.LABELS = np.random.randint(0,self.NUM_CLASS, (np.size(self.TRNG_DATA,0),1))
                self.LABELS = np.transpose(self.LABELS)
        
        #if called with a string: the function checks whether the string
        # says 'load'; if so, it loads the file named in the config
        # file. Otherwise it tries to use that string as a label vector,
        # and if that fails, a blank list is returned
        # Maybe I over thought this whole thing
        elif type(NEW_LABS) is str:
            if NEW_LABS == 'load':
                cp = ConfigParser()
                # NOTE: a config file must be read into cp before get() can succeed
                LABFILE_NAME = cp.get('ASCII_DATA', 'labels_filename')
                with open(LABFILE_NAME, 'rb') as LABFILE:
                    self.LABELS = pickle.load(LABFILE)
                self.LABELS = np.transpose(self.LABELS)

            else:
                try:
                    self.LABELS = np.array(list(NEW_LABS))
                    self.LABELS = np.transpose(self.LABELS)
                except ValueError:
                    # the string could not be interpreted as labels
                    return []
        #if called with a list: the function will convert the list into an array          
        elif type(NEW_LABS) is list:
            self.LABELS = np.asarray(NEW_LABS)
            self.LABELS = np.transpose(self.LABELS)
            
        #if function is called with an np.ndarray. the array is stored in the 
        #class parameter
        elif type(NEW_LABS) is np.ndarray:
            self.LABELS = NEW_LABS
        
        #if called with anything else: the function will fail
        else:
            raise TypeError('Labels can be of type None, strings, lists, or numpy.ndarray')
Example #21
 def writeINI(self, filename):
     """Write the version map to an INI file.
     """
     cp = ConfigParser()
     cp.add_section('versions')  # section must exist before set()
     for key, value in self.iteritems():
         cp.set('versions', key, value)
     fo = open(filename, 'wb')
     try:
         cp.write(fo)
     finally:
         fo.close()
Example #22
    def readINI(self, filename):
        """Read the version map from an INI file.
        """
        cp = ConfigParser()
        fi = open(filename, 'rb')
        try:
            cp.readfp(fi)  # read() expects filenames; readfp() takes a file object
        finally:
            fi.close()

        self.update(cp.items('versions'))
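
writeINI and readINI form a simple round-trip over a mapping. A minimal standalone sketch of the same idea with a plain dict, in Python 3 style (hypothetical helper names):

from configparser import ConfigParser

def write_versions(filename, versions):
    cp = ConfigParser()
    cp['versions'] = {k: str(v) for k, v in versions.items()}
    with open(filename, 'w') as fo:
        cp.write(fo)

def read_versions(filename):
    cp = ConfigParser()
    with open(filename) as fi:
        cp.read_file(fi)
    return dict(cp.items('versions'))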
Example #23
def load_text(text, **kwds):
    '''
    Parse configuration text, return configuration data structure. 
    '''
    cfg = ConfigParser()
    if hasattr(cfg, 'read_string'):
        cfg.read_string(text)
    else:
        import io
        cfg.readfp(io.BytesIO(text))
    d = dictify(cfg)
    return interpolate(d, **kwds)
Example #24
def map_config(configparser, section):
    dict1 = {}
    options = configparser.options(section)
    for option in options:
        try:
            dict1[option] = configparser.get(section, option)
            if dict1[option] == -1:
                print("skip: %s" % option, file=sys.stderr)
        except Exception:
            print("exception on %s!" % option)
            dict1[option] = None
    return dict1
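
For illustration, map_config can be driven like this (hypothetical section and option names):

from configparser import ConfigParser

cp = ConfigParser()
cp.read_string("[db]\nhost = localhost\nport = 5432\n")
print(map_config(cp, "db"))  # {'host': 'localhost', 'port': '5432'}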
Example #25
 def cpm(self, task): 
   global time
   global timer
   parser = ConfigParser()
   parser.read('config.ini')
   timer = int(parser.get('settings','timer'))
   time = task.time      
   if task.time < timer:
     if task.time:  # guard against division by zero
       bpmText.setText("BPM: "+str(round((hitcount / ( task.time / 60 )))))
     osuText.setText("osu!: "+str(round((hitcount / ( (task.time + .01) / 60 ))/4)))
   return task.cont 
Example #26
 def save_configs(self, key_value_pairs):
     config = ConfigParser()
     if os.path.exists(self.config_file_path):
         config.read(self.config_file_path)
     # ensure the section exists before setting values
     if not config.has_section(self.azure_crypt_config_section):
         config.add_section(self.azure_crypt_config_section)
     for key_value_pair in key_value_pairs:
         if key_value_pair.prop_value is not None:
             config.set(self.azure_crypt_config_section, key_value_pair.prop_name, key_value_pair.prop_value)
     with open(self.config_file_path, "wb") as configfile:
         config.write(configfile)
Example #27
 def getContext(self):
     """Create an SSL context.
     This will load SSL Public and private certs from the file.
     """
     config = ConfigParser()
     apppath = os.environ['ZIVIOSAGENTHOME']
     config.read(apppath+"/config/ZiviosAgentManager.ini")
     cafile = config.get("general","sslcert")
     prvfile = config.get("general","sslprivatekey")
     ctx = SSL.Context(SSL.SSLv23_METHOD)
     ctx.use_certificate_file(cafile)
     ctx.use_privatekey_file(prvfile)
     return ctx
Example #28
def getConfigurationFromINI():
	"""
	Return a dictionary of sections with the configuration read from the ini file merged with the defaults passed.
	Various ini locations are tried.
	"""
	defaults = _getConfigurationDefaults()
	ini = ConfigParser()
	x = os.path.join(_ROOT, 'serclient.ini')
	v = ini.read([x, '/opt/serclient/serclient.ini'])
	c = dict(ini._sections)  # note: _sections is a private ConfigParser attribute
	for k, v in defaults.items():
		if k in c.keys():
			v.update(c[k])
	return defaults
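
Since _sections is a private attribute, a version of the same merge using only the public API might look like this (a sketch under the same assumptions, reusing _ROOT and _getConfigurationDefaults from the original):

def getConfigurationFromINI_public():
    defaults = _getConfigurationDefaults()
    ini = ConfigParser()
    ini.read([os.path.join(_ROOT, 'serclient.ini'), '/opt/serclient/serclient.ini'])
    for section in ini.sections():
        if section in defaults:
            defaults[section].update(dict(ini.items(section)))
    return defaults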
Example #29
def DobOpcii(CP, IC, sekcii):
	"""
	Handles the NoOptionError that can arise when the module version differs from its settings.
	DobOpcii(CP, IC, sekcii)
	IC - the file from which the missing settings are taken
	sekcii - the list of sections to process
	"""
	if os.path.isfile(os.path.abspath(IC)):
		ICF = ConfigParser()
		with open(IC, "r") as f:
			ICF.readfp(f)
		for x in sekcii:
			Sopcii = CP.options(x)
			for y in ICF.options(x):
				if y not in Sopcii:
					CP.set(x, y, ICF.get(x,y))
Example #30
    def LoadLabels(self, NEW_LABS=None):
        '''
        DESCRPT: Loads labels into TRNG_LABS depending on the type of NEW_LABS
        PRECOND: data has been loaded (NewInput is called if not)
        POSTCON: self.TRNG_LABS is set and CHECKS['labels_loaded'] is True
        IN ARGS: NEW_LABS    TYPE: None,string,list,np.ndarray
        RETURNS: nothing ([] if string labels cannot be converted)
        NOTES:
        '''

        if not self.DATA_LOADED:
            self.NewInput()

        if NEW_LABS is None:
            if self.CONFIG_LOADED:
                self.TRNG_LABS = np.random.randint(0, self.NUM_CLASS, (np.size(self.TRNG_DATA, 0), 1))
                self.TRNG_LABS = np.transpose(np.sort(self.TRNG_LABS))

            else:
                self.LoadStackConfig()
                self.TRNG_LABS = np.random.randint(0, self.NUM_CLASS, (np.size(self.TRNG_DATA, 0), 1))
                self.TRNG_LABS = np.transpose(self.TRNG_LABS)

        elif type(NEW_LABS) is str:
            if NEW_LABS == 'load':
                cp = ConfigParser()
                # NOTE: a config file must be read into cp before get() can succeed
                LABFILE_NAME = cp.get('ASCII_DATA', 'labels_filename')
                with open(LABFILE_NAME, 'rb') as LABFILE:
                    self.TRNG_LABS = pickle.load(LABFILE)
                self.TRNG_LABS = np.transpose(self.TRNG_LABS)
            else:
                try:
                    self.TRNG_LABS = np.array(list(NEW_LABS))
                    self.TRNG_LABS = np.transpose(self.TRNG_LABS)
                except ValueError:
                    # the string could not be interpreted as labels
                    return []

        elif type(NEW_LABS) is list:
            self.TRNG_LABS = np.asarray(NEW_LABS)
            self.TRNG_LABS = np.transpose(self.TRNG_LABS)

        elif type(NEW_LABS) is np.ndarray:
            self.TRNG_LABS = NEW_LABS

        else:
            raise TypeError('Labels can be of type None, strings, lists, or numpy.ndarray')

        if self.TRNG_LABS is not None:
            self.CHECKS['labels_loaded'] = True
Example #31
#

basename = os.path.splitext(os.path.basename(options.config_file))[0]
log_fh = open(basename + '.pipeline.log', 'w')
# FIXME: the following code uses obsolete CVS ID tags.
# It should be modified to use git version information.
print >> log_fh, "$Id$\nInvoked with arguments:"
for name_value in options.__dict__.items():
    print >> log_fh, "%s %s" % name_value
print >> log_fh

#
# create the config parser object and read in the ini file
#

config_parser = ConfigParser.ConfigParser()
config_parser.read(options.config_file)

#
# initialize lalapps.power and lalapps.cosmicstring modules
#

power.init_job_types(config_parser,
                     job_types=("datafind", "binj", "lladd", "binjfind",
                                "burca", "sqlite"))
cosmicstring.init_job_types(config_parser,
                            job_types=("string", "meas_likelihood",
                                       "calc_likelihood", "runsqlite"))

#
# make directories to store the cache files, job logs, and trigger output
Example #32
    def newSession(self):
        start_time = time()

        print "Starting a new BitGrail session with Captcha."
        Config = ConfigParser.ConfigParser()
        Config.read("./.settings.ini")

        email = Config.get("BitGrail", 'email')
        password = Config.get("BitGrail", 'password')

        API_KEY_2CAPTCHA = Config.get("2captcha", '2CAPCHA_API_KEY')
        recapcha_sitekey = Config.get("BitGrail",
                                      'recapcha_sitekey')  # bitgrail

        submit_url = "https://bitgrail.com/login"

        # http://scraping.pro/2captcha-service-to-solve-recaptcha-2/
        # send credentials to the service to solve captcha
        # returns service's captcha_id of captcha to be solved
        url = "http://2captcha.com/in.php?key=" + API_KEY_2CAPTCHA + "&method=userrecaptcha&googlekey=" + recapcha_sitekey + "&pageurl=" + submit_url
        resp = requests.get(url)
        if resp.text[0:2] != 'OK':
            print(resp.text)
            quit('Error. Captcha is not received')
        captcha_id = resp.text[3:]
        # print("Captcha ID = ",captcha_id)
        if (captcha_id[0:3] == 'OR_'):
            quit("Error: " + captcha_id)

        # fetch ready 'g-recaptcha-response' token for captcha_id
        fetch_url = "http://2captcha.com/res.php?key=" + API_KEY_2CAPTCHA + "&action=get&id=" + captcha_id

        print "Waiting for captcha to be solved by mechanical Turk..."
        captcha_ok = False
        for i in range(1, 200):
            sleep(1)
            resp = requests.get(fetch_url)
            print ".",
            sys.stdout.flush()
            if resp.text[0:2] == 'OK':
                captcha_ok = True
                break
        if not captcha_ok:
            quit("Captcha isn't OK after " + str(time() - start_time) + " s: " +
                 resp.text)

        print '\nDone. Time to solve captcha:', time() - start_time

        headers = {
            'user-agent':
            'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.84 Safari/537.36',
            'referrer': 'https://bitgrail.com/login',
            'x-requested-with': 'XMLHttpRequest'
        }
        # POST parameters, might be more, depending on form content

        # resp = requests.post(submit_url, headers=headers, files=payload)
        self._session_ = requests.Session()
        self._session_.get("https://bitgrail.com/")

        payload = {
            'email': (None, email),
            'ajax': (None, '1'),
            'password': (None, password),
            'login': (None, '1'),
            'submit': (None, 'Login'),
            'g-recaptcha-response': (None, resp.text[3:])
        }
        resp = self._session_.post(submit_url, headers=headers, files=payload)
        # print(resp.text)

        if 'icon-ok-circled' not in resp.text:
            print "BitGrail: printing full dump of response"
            # data = dump.dump_response(resp)
            # print(data.decode('utf-8'))
            print resp.text
            quit("No valid response from login.")

        submit_url = "https://bitgrail.com/login2fa"
        two_factor_token = raw_input("Enter your 2FA code: ").strip()
        print("You entered: " + two_factor_token)

        payload = {
            'token2fa': (None, two_factor_token),
            'login2fa': (None, '1'),
            'ajax': (None, '1')
        }
        resp = self._session_.post(submit_url, headers=headers, files=payload)
        # print(resp.text)

        if 'icon-ok-circled' not in resp.text:
            print "BitGrail: printing full dump of response"
            # data = dump.dump_response(resp)
            # print(data.decode('utf-8'))
            print resp.text
            quit("No valid response from login.")

        sdata = self.store_session()
        with open("bg_session.dump", "w") as f:
            f.write(sdata)
        return self._session_
Example #33
 def __init__(self, login_configfile, charset='utf8'):
     self.charset = charset
     self.configparser = ConfigParser.RawConfigParser()
     with open(login_configfile) as f:
         self.configparser.readfp(f)
     self.loginfo = dict(self.configparser.items('database'))
Example #34
    def create_collector_config(self, workdir):
        """ Telegraf collector config,
        toml format

        """
        cfg_path = "agent_collector_{}.cfg".format(self.host)
        if os.path.isfile(cfg_path):
            logger.info(
                'Found agent config file in working directory with the same name as created for host %s.\n'
                'Creating new one via tempfile. This will affect predictable filenames for agent artefacts',
                self.host)
            handle, cfg_path = tempfile.mkstemp('.cfg', 'agent_collector_')
            os.close(handle)

        self.monitoring_data_output = "{remote_folder}/monitoring.rawdata".format(
            remote_folder=workdir)

        defaults_old_enabled = ['CPU', 'Memory', 'Disk', 'Net', 'System']

        try:
            config = ConfigParser.RawConfigParser()

            config.add_section("global_tags")
            config.add_section("agent")
            config.set(
                "agent",
                "interval",
                "'{interval}s'".format(interval=self.interval))
            config.set("agent", "round_interval", "true")
            config.set("agent", "flush_interval", "'1s'")
            config.set("agent", "collection_jitter", "'0s'")
            config.set("agent", "flush_jitter", "'1s'")

            for section in self.host_config.keys():
                # telegraf-style config
                if not self.old_style_configs:
                    config.add_section(
                        "{section_name}".format(
                            section_name=self.host_config[section]['name']))
                    for key, value in iteritems(self.host_config[section]):
                        if key != 'name':
                            config.set(
                                "{section_name}".format(
                                    section_name=self.host_config[section][
                                        'name']),
                                "{key}".format(key=key),
                                "{value}".format(value=value))
                # monitoring-style config
                else:
                    if section in defaults_old_enabled:
                        config.add_section(
                            "{section_name}".format(
                                section_name=self.host_config[section]['name']))
                        for key, value in iteritems(self.host_config[section]):
                            if key in [
                                    'fielddrop', 'fieldpass', 'percpu',
                                    'devices', 'interfaces'
                            ]:
                                config.set(
                                    "{section_name}".format(
                                        section_name=self.host_config[section][
                                            'name']),
                                    "{key}".format(key=key),
                                    "{value}".format(value=value))

            # outputs
            config.add_section("[outputs.file]")
            config.set(
                "[outputs.file]",
                "files",
                "['{config}']".format(config=self.monitoring_data_output))
            config.set("[outputs.file]", "data_format", "'json'")

            with open(cfg_path, 'w') as fds:
                config.write(fds)

            # dirty hack, this allow to avoid bash escape quoting, we're pushing shell script w/ arguments
            # index of argument is index of custom metric in our config
            inputs = ""
            for idx, cmd in enumerate(self.custom):
                inputs += "[[inputs.exec]]\n"
                inputs += "commands = ['/bin/sh {workdir}/agent_customs.sh -{idx}']\n".format(
                    workdir=workdir, idx=idx)
                inputs += "data_format = 'value'\n"
                inputs += "data_type = 'float'\n"
                inputs += "name_prefix = '{}_'\n\n".format(cmd.get('label'))
                if cmd['diff']:
                    decoder.diff_metrics['custom'].append(
                        decoder.find_common_names(cmd.get('label')))

            with open(cfg_path, 'a') as fds:
                fds.write(inputs)

            # append any raw telegraf configuration verbatim
            telegraf_raw = ""
            for element in self.telegrafraw:
                telegraf_raw += element

            with open(cfg_path, 'a') as fds:
                fds.write(telegraf_raw)

        except Exception as exc:
            logger.error(
                'Error trying to create monitoring config. Malformed? %s',
                exc,
                exc_info=True)
        return cfg_path
Example #35
def send_sms(message):
    #read parameters that are needed to send sms
    from_number = config_parser.get("SMS_SECTION", "from_number")
    to_number = config_parser.get("SMS_SECTION", "to_number")
    twilio_account_number = config_parser.get("SMS_SECTION", "twilio_account_number")
    twilio_account_token = config_parser.get("SMS_SECTION", "twilio_account_token")

    client = TwilioRestClient(twilio_account_number, twilio_account_token)
    client.messages.create(to=to_number, from_=from_number,
                        body=message)


if __name__ == '__main__':
    #read and initialize from config
    config_file = os.path.join(os.path.pardir, "config", "Config.txt")
    config_parser = ConfigParser.RawConfigParser()
    config_parser.read(config_file)

    #initialize logger
    logger = logging.getLogger(__name__)
    logger.setLevel(logging.DEBUG)

    # create a file handler
    handler = logging.FileHandler(os.path.join(os.path.pardir, "log", "logger.log"))
    handler.setLevel(logging.DEBUG)

    # create a logging format
    formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    handler.setFormatter(formatter)

    logger.addHandler(handler)
Example #36
    def __init__(self,confFile):
        self.parser = argparse.ArgumentParser()
        self.parser.add_argument("confFile", help="Need a configuration file.")
        self.args = self.parser.parse_args()
        self.inargs = {}


###########  Configuration ############

        self.Config = ConfigParser.ConfigParser()
        self.configFile = self.args.confFile 
        self.Config.read(self.configFile)
        self.template_file = self.ConfigSectionMap("General")['template']
        self.exp_template_file = self.ConfigSectionMap("General")['exp_template']
        self.chiplist = self.ConfigSectionMap("General")['chiplist']

        self.data_dir = self.ConfigSectionMap("General")['data_dir']
        self.data_file = self.ConfigSectionMap("General")['data_file']
        self.data_conf = self.ConfigSectionMap("General")['conf_dir']
        self.correc_dir =  self.ConfigSectionMap("General")['corr_dir']
        self.year =  self.ConfigSectionMap("General")['year']
        self.yearb =  self.ConfigSectionMap("General")['yearb']
        self.epoch =  self.ConfigSectionMap("General")['epoch']
        self.epochb =  self.ConfigSectionMap("General")['epochb']
        self.FILTER = self.ConfigSectionMap("General")['filter']
        #--------------------------------------------------------#
        # crosstalk                                              #
        #--------------------------------------------------------#
        self.xtalk_file = self.ConfigSectionMap("crosstalk")['xtalk']
        self.xtalk_template = self.ConfigSectionMap("crosstalk")['template']
        self.replace_file = self.ConfigSectionMap("crosstalk")['replace']
        #-----------------------------------------------------------#
        #             pixcorrect                                    #
        #-----------------------------------------------------------#
        self.bias = self.ConfigSectionMap("pixcorrect")['bias']
        self.bpm = self.ConfigSectionMap("pixcorrect")['bpm']
        self.linearity = self.ConfigSectionMap("pixcorrect")['linearity']
        self.bf = self.ConfigSectionMap("pixcorrect")['bf']
        self.flat =self.ConfigSectionMap("pixcorrect")['flat']
        self.copy_from_Dcache(self.correc_dir+'lin_'+str(self.yearb)+'/'+self.linearity)
        self.copy_from_Dcache(self.correc_dir+'bf_'+str(self.yearb)+'/'+self.bf)
        #---------------------------------------#
        #            skyCombine                 #
        #---------------------------------------#
        self.PCFILENAMEPREFIX = self.ConfigSectionMap("skyCombineFit")['pcafileprefix']
        self.PCFILENAME = self.PCFILENAMEPREFIX
        #   starflat  #
        self.starflat = self.ConfigSectionMap("starflat")['starflat']
        # skySubtract
        self.pc_filename  =  self.ConfigSectionMap("skysubtract")['pcfilename']
        self.weight = self.ConfigSectionMap("skysubtract")['weight']
        #------------------------------------------#
        #              scamp                       #
        #------------------------------------------#
        self.imagflags = self.ConfigSectionMap("scamp")['imagflags']
        self.flag_mask = self.ConfigSectionMap("scamp")['flag_mask']
        self.flag_astr = self.ConfigSectionMap("scamp")['flag_astr']
        self.catalog_ref = self.ConfigSectionMap("scamp")['catalog_ref']
        self.default_scamp = self.ConfigSectionMap("scamp")['default_scamp']
        self.head_file = self.ConfigSectionMap("scamp")['head']
        self.farg = {'filter':self.FILTER}
        self.head_FILE =  self.head_file.format(**self.farg)
        #
        self.copy_from_Dcache(self.data_conf+self.default_scamp)
        self.copy_from_Dcache(self.data_conf+self.head_FILE)
        #-----------------------------------------#
        #             sextractor                  #
        #-----------------------------------------#
        self.sexnnwFile = self.ConfigSectionMap("sextractor")['starnnw_name']
        self.sexconvFile = self.ConfigSectionMap("sextractor")['filter_name']
        self.sexparamFile  = self.ConfigSectionMap("sextractor")['parameters_name']
        self.sexparamPSFEXFile  =  self.ConfigSectionMap("sextractor")['parameters_name_psfex']
        self.configFile = self.ConfigSectionMap("sextractor")['configfile']
        self.confPSF = self.ConfigSectionMap("sextractor")['sexforpsfexconfigfile']

        self.copy_from_Dcache(self.data_conf+self.sexnnwFile)
        self.copy_from_Dcache(self.data_conf+self.sexconvFile)
        self.copy_from_Dcache(self.data_conf+self.sexparamFile)
        self.copy_from_Dcache(self.data_conf+self.sexparamPSFEXFile) 
        self.copy_from_Dcache(self.data_conf+self.configFile)
        self.copy_from_Dcache(self.data_conf+'default.psf')
        self.copy_from_Dcache(self.data_conf+self.confPSF)
        self.copy_from_Dcache(self.data_conf+'sex.param_bkg')
        self.sexbkgparamFile='sex.param_bkg'
        #----------------------------------------#
        #              psfex                     #   
        #----------------------------------------#
        self.config_filePSF =  self.ConfigSectionMap("psfex")['configfile']
        self.copy_from_Dcache(self.data_conf+self.config_filePSF)

        
        #-----------------------------------------#
        #       sextractor  with   psf            #
        #-----------------------------------------#
        self.sexparamFile_2  = self.ConfigSectionMap("sextractor")['parameters_name2']
        self.configFile2 = self.ConfigSectionMap("sextractor")['configfile2']
        self.sexconvFile2 = self.ConfigSectionMap("sextractor")['filter_name2']

        self.copy_from_Dcache(self.data_conf+self.sexconvFile2)
        self.copy_from_Dcache(self.data_conf+self.sexparamFile_2)
        self.copy_from_Dcache(self.data_conf+self.configFile2)
        self.copy_from_Dcache(self.data_conf+'default.psf')
Example #37
    def load_config(self, config):
        """ Load the configuration file and parse entries; when encountering an
            issue, fall back to safe defaults """

        self.parser = ConfigParser.SafeConfigParser()
        self.parser.read(config)

        # Set some safe defaults
        self.opacity = 50
        self.button_theme = "default"
        self.bgcolor = gtk.gdk.color_parse("black")
        self.monitor = 0
        blist = ""

        # Check if we're using HAL, and init it as required.
        if self.parser.has_section("settings"):

            if self.parser.has_option("settings","backend"):
               self.backend = self.parser.get("settings","backend")
            else:
               self.backend = ""

            if self.parser.has_option("settings", "monitor"):
               self.monitor = self.parser.getint("settings", "monitor")

        if self.backend == "HAL" or self.backend == "ConsoleKit":
            from dbushandler import DbusController
            self.dbus = DbusController(self.backend)
            if self.dbus.check() == False:
               del self.dbus
               self.backend = ""
        else:
            self.backend = ""

        # Check the looks section and load the config as required
        if self.parser.has_section("looks"):

            if self.parser.has_option("looks", "opacity"):
                self.opacity = self.parser.getint("looks", "opacity")

            if self.parser.has_option("looks","buttontheme"):
                self.button_theme = self.parser.get("looks", "buttontheme")

            if self.parser.has_option("looks", "bgcolor"):
                try:
                    self.bgcolor = gtk.gdk.color_parse(self.parser.get("looks", "bgcolor"))
                except:
                    self.logger.warning(_("Color %s is not a valid color, defaulting to black") % self.parser.get("looks", "bgcolor"))
                    self.bgcolor = gtk.gdk.color_parse("black")

            if self.parser.has_option("looks", "buttons"):
                blist = self.parser.get("looks", "buttons")

        # Parse shortcuts section and load them into an array for later reference.
        if self.parser.has_section("shortcuts"):
            self.shortcut_keys = self.parser.items("shortcuts")
            self.logger.debug("Shortcut Options: %s" % self.shortcut_keys)


        # Parse in commands section of the configuration file. Check for valid keys and set the attribs on self
        if self.parser.has_section("commands"):
            for key in self.parser.items("commands"):
                self.logger.debug("Setting cmd_%s as %s" % (key[0], key[1]))
                if key[0] in ['logout', 'restart', 'shutdown', 'suspend', 'hibernate', 'safesuspend', 'lock', 'switch']:
                    if key[0]: setattr(self, "cmd_" + key[0], key[1])

        # Load theme information from local directory if local mode is set
        if self.local_mode:
            self.theme_prefix = "./data/themes"
        else:
            self.theme_prefix = "%s/share/themes" % sys.prefix

        self.img_path = "%s/%s/oblogout" % (self.theme_prefix, self.button_theme)

        if os.path.exists("%s/.themes/%s/oblogout" % (os.environ['HOME'], self.button_theme)):
            # Found a valid theme folder in the userdir, use that
            self.img_path = "%s/.themes/%s/oblogout" % (os.environ['HOME'], self.button_theme)
            self.logger.info("Using user theme at %s" % self.img_path)
        else:
            if not os.path.exists("%s/%s/oblogout" % (self.theme_prefix, self.button_theme)):
                self.logger.warning("Button theme %s not found, reverting to default" % self.button_theme)
                self.button_theme = 'foom'


        # Parse button list from config file.
        validbuttons = ['cancel', 'logout', 'restart', 'shutdown', 'suspend', 'hibernate', 'safesuspend', 'lock', 'switch']
        buttonname = [_('cancel'), _('logout'), _('restart'), _('shutdown'), _('suspend'), _('hibernate'), _('safesuspend'), _('lock'), _('switch')]

        if not blist or blist == "default":
            list = validbuttons
        else:
            list = [button.strip() for button in blist.split(",")]

        # Validate the button list (iterate over a copy, since items may be removed)
        for button in list[:]:
            if not button in validbuttons:
                self.logger.warning(_("Button %s is not a valid button name, removing") % button)
                list.remove(button)
            else:
                if self.backend:
                    if not self.dbus.check_ability(button):
                        self.logger.warning(_("Can't %s, disabling button" % button))
                        list.remove(button)

        if len(list) == 0:
            self.logger.warning(_("No valid buttons found, resetting to defaults"))
            self.button_list = validbuttons
        else:
            self.logger.debug("Validated Button List: %s" % list)
            self.button_list = list
Example #38
class HooksConfig(object):

    """
    Gets and manages the bridge configuration file.
    """

    __instance = None

    _ = None
    _config = ConfigParser.ConfigParser()
    _CONFIG_FILE = "hooks_config" + os.sep + "bridge.cfg"

    @staticmethod
    def get_default_locale():

        return ('en_US', 'UTF8')

    @staticmethod
    def get_translations():

        user_locale = locale.getdefaultlocale()

        if all(x is None for x in user_locale):

            user_locale = HooksConfig.get_default_locale()

        t = gettext.translation("Git2CC", "locale",
                                user_locale, fallback=True)

        if isinstance(t, gettext.NullTranslations):

            user_locale = HooksConfig.get_default_locale()
            t = gettext.translation("Git2CC", "locale",
                                    user_locale, fallback=True)

        t.install()

        return t.ugettext

    def __new__(cls, *args, **kargs):

        if cls.__instance is None:

            cls.__instance = object.__new__(cls, *args, **kargs)

        return cls.__instance

    def __init__(self):
        """
        Class constructor that gets and validates the configuration and sets
        the translation alias

        """

        # Load user messages
        self._ = HooksConfig.get_translations()

        # Read and validate configuration file
        with open(self._CONFIG_FILE) as f:
            self._config.readfp(f)
        self._validate_config()

    def _validate_config(self):
        """
        This procedure checks the configuration file, ensuring that every
        required section, field, file and folder exists.

        """

        # Section validation
        if not self._config.has_section("cc_view"):

            raise ConfigException(self._("missing_section") + " cc_view")

        elif not self._config.has_section("cc_config"):

            raise ConfigException(self._("missing_section") + " cc_config")

        # cc_view section fields validation
        if not self._config.has_option("cc_view", "path"):

            raise ConfigException(self._("missing_field") + " path " +
                                  self._("in_section") + " cc_view.")

        elif not os.path.isdir(self.get_view()):

            raise ConfigException(self.get_view() +
                                  self._("folder_not_exists"))

        # cc_config section fields validation
        if not self._config.has_option("cc_config", "cleartool_path"):

            raise ConfigException(self._("missing_field") +
                                  " cleartool_path " +
                                  self._("in_section") + " cc_config.")

        elif not os.path.isfile(self.get_cleartool_path()):

            raise ConfigException(self.get_cleartool_path() +
                                  self._("file_not_exists"))

        if not self._config.has_option("cc_config", "cc_pusher_user"):

            raise ConfigException(self._("missing_field") +
                                  " cc_pusher_user " +
                                  self._("in_section") + " cc_config.")

    def get_view(self):
        """
        Path to ClearCase view to sync

        """

        return self._config.get("cc_view", "path")

    def get_cleartool_path(self):
        """
        Path to clear tool command

        """

        return self._config.get("cc_config", "cleartool_path")

    def get_cc_pusher_user(self):
        """
        Returns the user pushing from the ClearCase view to sync work with Git.
        When the hooks detect this user, they avoid doing any other operation.

        """

        return self._config.get("cc_config", "cc_pusher_user")

    def get_sync_branches(self):
        """
        Returns the list of Git branches in sync with ClearCase

        """

        branches = []

        if not self._config.has_option("git_config", "sync_branches"):

            branches.append("master")

        else:

            branches = self._config.get("git_config",
                                        "sync_branches").split(',')
            branches = [x.strip() for x in branches]

        return branches

    def get_vobs(self):
        """
        Return the configured CC vobs

        """
        vobs = []

        vobs = self._config.get("cc_view", "vobs").split(' ')

        return vobs
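
A bridge.cfg consistent with the validation above might look like this (illustrative paths and names):

[cc_view]
path = /var/cc/my_view
vobs = /vobs/proj1 /vobs/proj2

[cc_config]
cleartool_path = /usr/atria/bin/cleartool
cc_pusher_user = ccpusher

[git_config]
sync_branches = master, develop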
Example #39
#####matplotlib###############
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import matplotlib.mlab as mlab

#####import for django database####
sys.path.append('./db')
import exec_sqlite
import query_mongo
import ml_util
from ml_util import *
import zip_feature_util 

CONF_FILE='../../app.config' # at the base dir of the web
config=ConfigParser.ConfigParser()
config.read(CONF_FILE)
libsvm_filename = config.get("machine_learning","libsvm_alldata_filename")
dnn_filename = config.get("machine_learning","dnn_alldata_filename")



def main():
    
    parser = ArgumentParser(description=__description__)
    parser.add_argument("-f", "--folder", type=str, metavar="folder of features", help="hdfs folder contains features", required=False)
    parser.add_argument("-n", "--data_fname", type=str, metavar="data file name", help="file name for sample dataset", required=False)
    parser.add_argument("-o", "--out", type=str, metavar="out figure folder", help="folder contains output", required=False)
    parser.add_argument("-r", "--row_id", type=str, metavar="row id", help="row_id number in the db", required=False)
    parser.add_argument("-w", "--fromweb", type=str, metavar="flag for web", help="flag for web", required=False)
    parser.add_argument("-pm", "--parameter", type=str, metavar="parameters in json", help="json string contains learning alg and parameter selection", required=False)
Example #40
'''
Set the config variable.
'''

import ConfigParser as cp
import json

config = cp.RawConfigParser()
config.read('../data/config/config.cfg')

min_wdw_sz = json.loads(config.get("hog","min_wdw_sz"))
step_size = json.loads(config.get("hog", "step_size"))
orientations = config.getint("hog", "orientations")
pixels_per_cell = json.loads(config.get("hog", "pixels_per_cell"))
cells_per_block = json.loads(config.get("hog", "cells_per_block"))
visualize = config.getboolean("hog", "visualize")
normalize = config.getboolean("hog", "normalize")
pos_feat_ph = config.get("paths", "pos_feat_ph")
neg_feat_ph = config.get("paths", "neg_feat_ph")
model_path = config.get("paths", "model_path")
threshold = config.getfloat("nms", "threshold")
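
For reference, a config.cfg compatible with the reads above might look like this (values are illustrative; the options fetched with json.loads must be valid JSON):

[hog]
min_wdw_sz = [100, 40]
step_size = [10, 10]
orientations = 9
pixels_per_cell = [8, 8]
cells_per_block = [3, 3]
visualize = False
normalize = True

[paths]
pos_feat_ph = ../data/features/pos
neg_feat_ph = ../data/features/neg
model_path = ../data/models/svm.model

[nms]
threshold = 0.4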
Example #41
def process(keyword, page, website):  # later this should distinguish content types

    siteconfs = os.listdir(
        os.path.dirname(os.path.abspath(__file__)) + '/siteconfs')
    if not website in siteconfs:
        print 'siteconf not found'
        return []
    confpath = os.path.dirname(
        os.path.abspath(__file__)) + '/siteconfs/' + website
    siteconf = ConfigParser.ConfigParser()
    siteconf.read(confpath)
    extractors = siteconf.sections()
    try:
        extractors = sorted(extractors, key=lambda d: int(d[-1]))
    except:
        pass
    urlBac = ''
    for extractor in extractors:
        url = siteconf.get(extractor, 'searchUrl')
        url = url.replace('${keyword}', keyword).replace('${page}', str(page))
        print url
        segmentCut = siteconf.get(extractor, 'segment')
        titleCut = siteconf.get(extractor, 'title')
        urlCut = siteconf.get(extractor, 'url')
        infoCuts = siteconf.get(extractor, 'info')
        urlinfos = []
        if urlBac == url:  # if it is the same link, don't open it again
            # note: printing HTMLParser().unescape('&#183;').encode('unicode-escape').decode('string_escape') comes out garbled
            pageBuf = ct.crawlerTool.getPage(url)
        else:
            urlBac = url
            pageBuf = ct.crawlerTool.getPage(url)
        baseurl = '/'.join(url.split('/')[:3])
        pageBuf = urlHostParser.make_links_absolute(pageBuf, baseurl)
        segments = ct.crawlerTool.getXpath(segmentCut, pageBuf)
        if not segments:
            print 'no matched segments', website
            continue
        for segment in segments:
            try:
                urlinfo = {}
                urlinfo['url'] = ct.crawlerTool.getXpath(urlCut, segment)[0]

                title = HTMLParser().unescape(
                    ct.crawlerTool.extractorText(
                        ct.crawlerTool.getXpath(
                            titleCut, segment)[0]))  # output seems garbled later unless converted to str
                #print title,HTMLParser().unescape(title)
                #print ct.crawlerTool.getXpath('//h2/a[1]', segment)  # after decoding, &#183; seems to become garbled
                urlinfo['title'] = title
                #print title
                urlinfo['info'] = ''
                for infoCut in infoCuts.split(';'):
                    urlinfo['info'] += ' '.join(
                        ct.crawlerTool.getXpath(infoCut, segment))  # concatenate the info fields
                #print urlinfo['url'], urlinfo['title'], urlinfo['info']
                urlinfos.append(urlinfo)
            except Exception, e:
                traceback.print_exc()

        return {"urlinfos": urlinfos}
Example #42
def compare_ini(ini_dirs, ini_name):
    # list of configuration INI readers
    configs = list()
    common_prefix_len = len(os.path.commonprefix(ini_dirs))
    # iterate all ini files and make a list of (folder_name, ConfigParser) items
    for ini_dir in ini_dirs:
        # extract the first unique item of the path to use as an identifier of ini files set
        folder_name = ini_dir[common_prefix_len:].split(os.sep)[0]
        configs.append((folder_name, ConfigParser.RawConfigParser()))
        configs[-1][1].read(os.path.join(ini_dir, ini_name))

    # initialize exceptions from the corresponding exceptions *.ini
    exceptions = ConfigParser.RawConfigParser()
    file_name, file_ext = os.path.splitext(ini_name)
    exception_ini = file_name.strip('1234567890') + file_ext
    exception_ini = os.path.join('exceptions', exception_ini)
    if os.path.exists(exception_ini):
        exceptions.read(exception_ini)

    # build a united structure of all sections in all configs
    conf_structure = dict()
    result_table = Table(ini_name)
    header = list()
    header.append('')
    for folder_name, conf in configs:
        # compose the header of the result table
        header.append(folder_name)
        for section in conf.sections():
            if section not in conf_structure:
                conf_structure[section] = set()
            conf_structure[section] |= set(conf.options(section))
    header.append('status')
    result_table.add_header(header)
    # initializing statistic counters
    ignored_cnt = 0
    default_cnt = 0
    unequal_cnt = 0
    for section in conf_structure:
        section_exc = section.strip('1234567890')
        section_values = list()
        option_values = list()
        section_values.append(section)
        for folder_name, conf in configs:
            if conf.has_section(section):
                section_values.append('+')
            else:
                section_values.append('-')
        for option in conf_structure[section]:
            option_exc = option.strip('1234567890')
            value_exc = 'unequal'
            # if current option is marked 'ignore' in exceptions list - don't check it
            if exceptions.has_section(section_exc):
                if exceptions.has_option(section_exc, option_exc):
                    value_exc = exceptions.get(section_exc, option_exc)
            values = list()
            values.append('  ' + str(option))
            for folder_name, conf in configs:
                if conf.has_section(section):
                    if conf.has_option(section, option):
                        values.append(conf.get(section, option))
                    else:
                        values.append('-')
                else:
                    values.append('-')
            # if at least 2 different values are found - add a row to the table
            if len(set(values)) > 2:
                values.append(value_exc)
                option_values.append(values)
            # else:
            #     equal_cnt += 1
        # add to output: name of the section and its state (present/absent) for each conf
        if len(set(section_values)) > 2 or len(option_values):
            result_table.add_row(Row(section_values, align=Alignment.CENTER))
            for ov in option_values:
                value_exc = ov[-1]
                color = 'Default'
                if value_exc == 'unequal':
                    color = 'Red'
                    unequal_cnt += 1
                elif value_exc == 'ignore':
                    color = 'Grey'
                    ignored_cnt += 1
                elif value_exc == 'default':
                    color = 'Blue'
                    default_cnt += 1
                result_table.add_row(Row(ov, font=color))
    result_table.add_sep()
    if ignored_cnt or default_cnt or unequal_cnt:
        result_table.add_total('Ignored:', ignored_cnt)
        result_table.add_total('Default:', default_cnt)
        result_table.add_total('Unequal:', unequal_cnt)
    result_table.output()
    return bool(unequal_cnt)
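# A hypothetical sketch of the exceptions file that compare_ini() looks up under
# 'exceptions/'; the section and option names are illustrative only. The values
# 'ignore' and 'default' are what drive the Grey/Blue row colouring above.
SAMPLE_EXCEPTIONS_INI = """
[General]
log_level = ignore
connection_timeout = default
"""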
Example #43
0
def pymelLogFileConfig(fname, defaults=None, disable_existing_loggers=False):
    """
    Reads in a file to set up pymel's loggers.

    In most respects, this function behaves similarly to logging.config.fileConfig -
    consult its help for details. In particular, the format of the config file
    must meet the same requirements - it must have the sections [loggers],
    [handlers], and [formatters], and it must have an entry for [logger_root]...
    even if no options are set for it.

    It differs from logging.config.fileConfig in the following ways:

    1) It will not disable any pre-existing loggers which are not specified in
    the config file, unless disable_existing_loggers is set to True.

    2) Like logging.config.fileConfig, the default behavior for pre-existing
    handlers on any loggers whose settings are specified in the config file is
    to remove them; i.e., ONLY the handlers explicitly given in the config will
    be on the configured logger.
    However, pymelLogFileConfig provides the ability to keep pre-existing
    handlers, by setting the 'remove_existing_handlers' option in the appropriate
    section to False.
    """
    cp = ConfigParser.ConfigParser(defaults)
    if hasattr(cp, 'readfp') and hasattr(fname, 'readline'):
        cp.readfp(fname)
    else:
        cp.read(fname)

    formatters = logging.config._create_formatters(cp)

    # _install_loggers will remove all existing handlers for the
    # root logger, and any other handlers specified... to override
    # this, save the existing handlers first
    root = logging.root
    root_logger_level = root.level  # restore root level below if NOTSET
    # make sure you get a COPY of handlers!
    rootHandlers = root.handlers[:]
    oldLogHandlers = {}

    # Don't use getLogger while iterating through loggerDict, as that
    # may actually create a logger, and change the size of the dict
    # ...instead, just ignore any PlaceHolder instances we get, as they
    # won't have any handlers to worry about anyway
    # thanks to pierre.augeard for pointing this one out
    for loggerName, logger in root.manager.loggerDict.iteritems():
        # Make sure it's not a PlaceHolder
        if isinstance(logger, logging.Logger):
            # make sure you get a COPY of handlers!
            oldLogHandlers[loggerName] = logger.handlers[:]

    # critical section
    logging._acquireLock()
    try:
        # Handlers add themselves to logging._handlers
        handlers = logging.config._install_handlers(cp, formatters)

        if sys.version_info >= (2, 6):
            logging.config._install_loggers(cp,
                                            handlers,
                                            disable_existing_loggers=0)
        else:
            logging.config._install_loggers(cp, handlers)
            # The _install_loggers function disables old-loggers, so we need to
            # re-enable them
            for k, v in logging.root.manager.loggerDict.iteritems():
                if hasattr(v, 'disabled') and v.disabled:
                    v.disabled = 0

        # Now re-add any removed handlers, if needed
        secNames = cp.get('loggers', 'keys').split(',')
        secNames = ['logger_' + x.strip() for x in secNames]
        _addOldHandlers(root, rootHandlers, 'logger_root', cp)
        for secName in secNames:
            if secName == 'logger_root':
                logger = root
                oldHandlers = rootHandlers
            else:
                logName = cp.get(secName, 'qualname')
                logger = logging.getLogger(logName)
                oldHandlers = oldLogHandlers.get(logName)
            if oldHandlers:
                _addOldHandlers(logger, oldHandlers, secName, cp)

        # if root logger level not explicitly set in the pymel.conf file,
        # then set it back to the original value.  The root logger always
        # has to have a level.
        if logging.root.level == logging.NOTSET:
            logging.root.setLevel(root_logger_level)

    finally:
        logging._releaseLock()
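# A minimal sketch of a config file accepted by pymelLogFileConfig; the logger
# names, levels, and formatter are hypothetical. It follows the standard
# fileConfig layout, plus the non-standard 'remove_existing_handlers' option
# described in the docstring (False keeps handlers already on the logger).
SAMPLE_LOG_CONFIG = """
[loggers]
keys = root,pymel

[handlers]
keys = console

[formatters]
keys = plain

[logger_root]
level = WARNING
handlers = console

[logger_pymel]
level = INFO
handlers = console
qualname = pymel
remove_existing_handlers = False

[handler_console]
class = StreamHandler
level = NOTSET
formatter = plain
args = (sys.stderr,)

[formatter_plain]
format = %(name)s : %(levelname)s : %(message)s
"""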
Example #44
0
import os
import socket
import ConfigParser
import Queue
import threading
import time

import bottle
import audiere

from bottle import route, view, request
from ..dtmf import tonegen as dtmf
from ..nvv import core

socket.setdefaulttimeout(0.5)

parser = ConfigParser.ConfigParser(
    dict(remote_ip='192.168.1.254',
         remote_port='1024',
         debug='0',
         server_port='8080',
         callsign='0'))

parser.read(
    ["settings.ini",
     os.path.join(os.path.dirname(__file__), 'settings.ini')])

if not parser.has_section("Server"):
    parser.add_section("Server")

bottle.debug(parser.getboolean('Server', 'debug'))

CALL_FILE = os.path.join(os.path.dirname(__file__), 'call.wav')

REMOTE = (parser.get('Server',
Example #45
0
    def __init__(self, config_file, resume_training=True, resume_epoch=None, predict_length=None, multi_gpu=False):
        self.config = ConfigParser.ConfigParser(allow_no_value=True)
        try:
            self.config.readfp(open(config_file))
        except:
            print('Could not read configuration file {} - exiting.'.format(config_file))
            sys.exit(1)
        # Get General Configuration
        self.train_multi_gpu = multi_gpu
        self.resume_training = resume_training
        self.resume_epoch = resume_epoch
        self.keras_verbose = self.config.getint('general', 'keras_verbose')
        self.seed = self.config.getint('general', 'seed')
        if self.seed is None:
            self.seed = 42
        # Get Model Configuration
        self.data_dir = self.config.get('model', 'data_dir')
        self.data_dir_structure = self.config.get('model', 'data_dir_structure')
        self.model_dir = self.config.get('model', 'model_dir')
        if len(self.model_dir) == 0:
            self.model_dir = None
        self.sample_rate = self.config.getint('model', 'sample_rate')
        self.debug = self.config.getint('model', 'debug')
        # Training Configuration
        self.max_epoch = self.config.getint('training', 'max_epoch')
        self.test_factor = self.config.getfloat('training', 'test_factor')
        self.batch_size = self.config.getint('training', 'batch_size')
        self.output_bins = self.config.getint('training', 'output_bins')
        self.filters = self.config.getint('training', 'filters')
        self.dilation_depth = self.config.getint('training', 'dilation_depth')
        self.stacks = self.config.getint('training', 'stacks')
        self.use_bias = self.config.getboolean('training', 'use_bias')
        self.use_ulaw = self.config.getboolean('training', 'use_ulaw')
        self.res_l2 = self.config.getint('training', 'res_l2')
        self.final_l2 = self.config.getint('training', 'final_l2')
        self.initial_fragment_length = self.config.getint('training', 'initial_fragment_length')
        self.fragment_stride = self.config.getint('training', 'fragment_stride')
        self.use_skip_connections = self.config.getboolean('training', 'use_skip_connections')
        self.learn_all_outputs = self.config.getboolean('training', 'learn_all_outputs')
        self.random_train_batches = self.config.getboolean('training', 'random_train_batches')
        self.randomize_batch_order = self.config.getboolean('training', 'randomize_batch_order')
        self.train_only_in_receptive_field = self.config.getboolean('training', 'train_only_in_receptive_field')
        self.train_with_soft_targets = self.config.getboolean('training', 'train_with_soft_targets')
        self.soft_target_stdev = self.config.getfloat('training', 'soft_target_stdev')
        self.optimizer = self.config.get('training', 'optimizer')
        self.early_stopping_patience = self.config.getint('training', 'early_stopping_patience')
        # Prediction Configuration
        self.predict_length = self.config.getfloat('prediction', 'predict_length')
        # Let's allow the user to overwrite the length via cmd-line, it is more practical :-)
        if predict_length is not None:
            self.predict_length = predict_length
        self.sample_argmax = self.config.getboolean('prediction', 'sample_argmax')
        self.sample_temperature = self.config.getfloat('prediction', 'sample_temperature')
        self.predict_initial_input = self.config.get('prediction', 'initial_input')
        if len(self.predict_initial_input) == 0:
            self.predict_initial_input = None
        self.predict_use_softmax_as_input = self.config.getboolean('prediction', 'use_softmax_as_input')
        self.sample_seed = self.seed
        np.random.seed(self.seed)
        self.rnd = np.random.RandomState(self.seed)

        self.fragment_length = self.initial_fragment_length + self._compute_receptive_field2(self.sample_rate, self.dilation_depth, self.stacks)[0]
        # Additional Settings
        self.num_gpus = 1
        self.train_rank = 0
        if self.train_multi_gpu:
            self.train_rank = hvd.rank()
            self.num_gpus = hvd.size()
        print('rank = {}, num_gpu={}'.format(self.train_rank, self.num_gpus))
        self.dataset = DataSet(self.config, self.fragment_length, self.num_gpus, self.train_rank)
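# A hypothetical sketch of the INI file this constructor expects; section and
# option names follow the get*/getint/getboolean calls above, the values are
# made up, and most of the [training]/[prediction] options are omitted.
SAMPLE_CONFIG = """
[general]
keras_verbose = 1
seed = 42

[model]
data_dir = ./data
data_dir_structure = flat
model_dir =
sample_rate = 16000
debug = 0

[training]
max_epoch = 100
batch_size = 8

[prediction]
predict_length = 3.0
sample_argmax = True
initial_input =
"""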
Example #46
0
def read_plat(env, args, fn):
    cfig = ConfigParser.ConfigParser(env.Dictionary())
    cfig.read(fn)
    buildvars = [
        [
            'CC',
            'TIFFINCLUDE',  # where to find preinstalled tifflib headers
            'TIFFLIB',  # where to find a preinstalled tifflib library
        ],  # replace
        [
            'CPPPATH',
            'CPPDEFINES',
            'CPPFLAGS',
            'CCFLAGS',
            'LIBPATH',
            'LINKFLAGS',
            'EZXML_CPPDEFINES',  # build flags specific to ezxml.c
        ]
    ]  # append
    vars = [
        [
            'install',
            ['RAD_BASEDIR', 'RAD_BINDIR', 'RAD_RLIBDIR', 'RAD_MANDIR'], []
        ],
        [
            'code',
            [  # replace
            ],
            [  # append
                'RAD_COMPAT',  # theoretically obsolete (src/common/strcmp.c)
                'RAD_MATHCOMPAT',  # erf.c floating point error function
                'RAD_ARGSCOMPAT',  # fixargv0.c for Windows
                'RAD_NETCOMPAT',  # [win_]netproc.c for ranimate
                'RAD_MLIB',  # usually 'm', or any fastlib available
                'RAD_SOCKETLIB',  # ws_2_32 on Windows (VC links it automatically)
                'RAD_PROCESS',  # our process abstraction and win_popen()
                'RAD_PCALLS',  # more custom process abstraction
            ]
        ],
    ]
    if args.get('RAD_DEBUG', 0):
        vars.insert(0, ['debug'] + buildvars)
    else:
        vars.insert(0, ['build'] + buildvars)
    for section in vars:
        if cfig.has_section(section[0]):
            for p in section[1]:  # single items to replace
                try:
                    v = cfig.get(section[0], p)
                except ConfigParser.NoOptionError:
                    continue
                env[p] = v
                #print '%s: %s' % (p, env[p])
            for p in section[2]:  # multiple items to append
                try:
                    v = cfig.get(section[0], p)
                except ConfigParser.NoOptionError:
                    continue
                env.Append(**{p: env.Split(v)})
    # XXX Check that basedir exists.
    for k in ['RAD_BINDIR', 'RAD_RLIBDIR', 'RAD_MANDIR']:
        if (env.has_key('RAD_BASEDIR') and env.has_key(k)
                and not os.path.isabs(env[k])):
            env[k] = os.path.join(env['RAD_BASEDIR'], env[k])
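# A hypothetical sketch of the platform config file read_plat() consumes: the
# [build]/[debug] section carries the compiler variables listed in buildvars
# (first group replaced, second appended); [install] and [code] follow vars.
# All values below are illustrative.
SAMPLE_PLAT_CFG = """
[build]
CC = gcc
CCFLAGS = -O2 -Wall

[install]
RAD_BASEDIR = /usr/local/radiance
RAD_BINDIR = bin

[code]
RAD_MLIB = m
"""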
Example #47
0
        sys.exit(0)
    if '-h' in sys.argv or '--help' in sys.argv:
        print __helpmsg__
        sys.exit(0)

    options = {'d':'debug', 'f':'full', 'l':'long', 'version':'version',
               'q':'q', 'reverse':'reverse'}
    doptions = {'header':'header', 'sort':'sort', 'u':'user'}
    (opts, args) = Cobalt.Util.dgetopt_long(sys.argv[1:], options,
                                            doptions, __helpmsg__)

    # check for custom header, first in cobalt.conf, env, then in --header
    custom_header = None
    custom_header_full = None
    try:
        CP = ConfigParser.ConfigParser()
        CP.read(Cobalt.CONFIG_FILES)
        custom_header = CP.get('cqm', 'cqstat_header').split(':')
    except:
        pass
        
    try:
        custom_header_full = CP.get('cqm', 'cqstat_header_full').split(':')
    except:
        pass
    if 'QSTAT_HEADER' in os.environ:
        custom_header = os.environ['QSTAT_HEADER'].split(':')
    elif 'CQSTAT_HEADER' in os.environ:
        custom_header = os.environ['CQSTAT_HEADER'].split(':')
    if 'QSTAT_HEADER_FULL' in os.environ:
        custom_header_full = os.environ['QSTAT_HEADER_FULL'].split(':')
Example #48
0
  def run_pants_with_workdir_without_waiting(self, command, workdir, config=None, extra_env=None,
                                             build_root=None, print_exception_stacktrace=True,
                                             **kwargs):
    args = [
      '--no-pantsrc',
      '--pants-workdir={}'.format(workdir),
      '--kill-nailguns',
      '--print-exception-stacktrace={}'.format(print_exception_stacktrace),
    ]

    if self.hermetic():
      args.extend(['--pants-config-files=[]',
                   # Turn off cache globally.  A hermetic integration test shouldn't rely on cache,
                   # or we have no idea if it's actually testing anything.
                   '--no-cache-read', '--no-cache-write',
                   # Turn cache on just for tool bootstrapping, for performance.
                   '--cache-bootstrap-read', '--cache-bootstrap-write'
                   ])

    if config:
      config_data = config.copy()
      ini = ConfigParser.ConfigParser(defaults=config_data.pop('DEFAULT', None))
      for section, section_config in config_data.items():
        ini.add_section(section)
        for key, value in section_config.items():
          ini.set(section, key, value)
      ini_file_name = os.path.join(workdir, 'pants.ini')
      with safe_open(ini_file_name, mode='w') as fp:
        ini.write(fp)
      args.append('--pants-config-files=' + ini_file_name)

    pants_script = os.path.join(build_root or get_buildroot(), self.PANTS_SCRIPT_NAME)

    # Permit usage of shell=True and string-based commands to allow e.g. `./pants | head`.
    if kwargs.get('shell') is True:
      assert not isinstance(command, list), 'must pass command as a string when using shell=True'
      pants_command = ' '.join([pants_script, ' '.join(args), command])
    else:
      pants_command = [pants_script] + args + command

    # Only whitelisted entries will be included in the environment if hermetic=True.
    if self.hermetic():
      env = dict()
      for h in self.hermetic_env_whitelist():
        env[h] = os.getenv(h) or ''
      hermetic_env = os.getenv('HERMETIC_ENV')
      if hermetic_env:
        for h in hermetic_env.strip(',').split(','):
          env[h] = os.getenv(h)
    else:
      env = os.environ.copy()
    if extra_env:
      env.update(extra_env)

    # Don't overwrite the profile of this process in the called process.
    # Instead, write the profile into a sibling file.
    if env.get('PANTS_PROFILE'):
      prof = '{}.{}'.format(env['PANTS_PROFILE'], self._get_profile_disambiguator())
      env['PANTS_PROFILE'] = prof
      # Make a note the subprocess command, so the user can correctly interpret the profile files.
      with open('{}.cmd'.format(prof), 'w') as fp:
        fp.write(b' '.join(pants_command))

    return pants_command, subprocess.Popen(pants_command, env=env, stdin=subprocess.PIPE,
      stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs)
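  # A hypothetical usage sketch: 'config' is a dict of dicts mapping section
  # names to option dicts, with an optional 'DEFAULT' entry that becomes the
  # ConfigParser defaults before everything is written out as pants.ini:
  #
  #   sample_config = {
  #       'DEFAULT': {'print_exception_stacktrace': 'True'},
  #       'GLOBAL': {'level': 'debug'},
  #   }
  #   self.run_pants_with_workdir_without_waiting(['list', '::'], workdir,
  #                                               config=sample_config)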
Example #49
0
File: config.py Project: js297/iris
            path = c_path
        else:
            msg = 'Ignoring config item {!r}:{!r} (section:option) as {!r}' \
                  ' is not a valid directory path.'
            warnings.warn(msg.format(section, option, c_path))
    return path


# Figure out the full path to the "iris" package.
ROOT_PATH = os.path.abspath(os.path.dirname(__file__))

# The full path to the configuration directory of the active Iris instance.
CONFIG_PATH = os.path.join(ROOT_PATH, 'etc')

# Load the optional "site.cfg" file if it exists.
config = ConfigParser.SafeConfigParser()
config.read([os.path.join(CONFIG_PATH, 'site.cfg')])


##################
# Resource options
_RESOURCE_SECTION = 'Resources'


SAMPLE_DATA_DIR = get_dir_option(
    _RESOURCE_SECTION, 'sample_data_dir',
    default=os.path.join(os.path.dirname(__file__), 'sample_data'))

TEST_DATA_DIR = get_dir_option(_RESOURCE_SECTION, 'test_data_dir',
                               default=os.path.join(os.path.dirname(__file__),
                                                    'test_data'))
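# A hypothetical sketch of the optional site.cfg; the [Resources] section name
# comes from _RESOURCE_SECTION above, and the paths are placeholders.
SAMPLE_SITE_CFG = """
[Resources]
sample_data_dir = /opt/iris/sample_data
test_data_dir = /opt/iris/test_data
"""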
Example #50
0
import ConfigParser
import re
import sys
class initAutomation(object):

    def __init__(self, trafficProfile):
        self.trafficProfile = trafficProfile
        print trafficProfile

    def updateConfigFile(self, trafficProfile, gemuConfigfile):
        gemuConfigfile.set('Test_Case_Details', 'traffic_profile', trafficProfile[1])
        gemuConfigfile.set('Test_Environment_GEMU', 'logFolder', trafficProfile[2])
        with open('/home/Automation/scripts/testConfiguration.ini', 'w') as configFile:
            gemuConfigfile.write(configFile)

if __name__ == "__main__":
    gemuConfigfile = ConfigParser.ConfigParser()
    gemuConfigfile.read('/home/Automation/scripts/testConfiguration.ini')
    trafficProfile = sys.argv
    testCase = initAutomation(trafficProfile)
    testCase.updateConfigFile(trafficProfile, gemuConfigfile)
Example #51
0
# @{
##

import time
import os
from oeqa.runtime.wifi import wifi
import string
try:
    import ConfigParser
except ImportError:
    import configparser as ConfigParser
from oeqa.oetest import oeRuntimeTest
from oeqa.utils.helper import shell_cmd_timeout
from oeqa.utils.decorators import tag

ssid_config = ConfigParser.ConfigParser()
config_path = os.path.join(os.path.dirname(__file__), "files/config.ini")
ssid_config.readfp(open(config_path))

@tag(TestType="FVT")
class CommWiFiConect(oeRuntimeTest):
    """
    @class CommWiFiConect
    """
    def setUp(self):
        ''' initialize wifi class
        @fn setUp
        @param self
        @return
        '''
        self.wifi = wifi.WiFiFunction(self.target)
Example #52
0
    def __init__(self, login_configfile, send_info_configfile):
        """Constructor"""
        self.configparser = ConfigParser.RawConfigParser()
        self.configparser.readfp(open(login_configfile))
        self.loginfo = dict(self.configparser.items('sender'))
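    # A hypothetical sketch of the login config file read above: everything in
    # the [sender] section becomes the self.loginfo dict. The option names and
    # values below are placeholders, e.g.:
    #
    #   [sender]
    #   username = alice@example.com
    #   password = secret
    #   smtp_server = smtp.example.com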
Example #53
0
    def user_configuration(self, configFile=None):

        # get a logger
        logger = logging.getLogger("configuration")

        # load and parse the provided configFile, if provided
        if not configFile:
            logger.warn(
                'no user configuration file provided; using only built-in default settings'
            )
            return

        # load the config file
        try:
            configparser = ConfigParser.ConfigParser()
            configparser.readfp(open(configFile))
            logger.debug(
                'successfully read and parsed user configuration file %s' %
                configFile)
        except:
            logger.fatal('error reading user configuration file %s' %
                         configFile)
            raise

        # work_dir must be provided before initialising other directories
        self.work_dir = None

        if self.work_dir is None:
            try:
                self.work_dir = configparser.get('Paths', 'work')

            except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
                if self.work_dir is None:
                    logger.critical('Paths:work has no value!')
                    raise Exception

        # look for those items that are user-configurable, and get their values
        # sptk_bindir= ....

        # a list instead of a dict because OrderedDict is not available until 2.7
        # and I don't want to import theano here just for that one class
        # each entry is a tuple of (variable name, default value, section in config file, option name in config file)
        #
        # the type of the default value is important and controls the type that the corresponding
        # variable will have
        #
        # to set a default value of 'undefined' use an empty string
        # or the special value 'impossible', as appropriate
        #
        impossible_int = int(-99999)
        impossible_float = float(-99999.0)

        user_options = [
            ('work_dir', self.work_dir, 'Paths', 'work'),
            ('data_dir', '', 'Paths', 'data'),
            ('plot_dir', '', 'Paths', 'plot'),
            ('plot', False, 'Utility', 'plot'),
            ('profile', False, 'Utility', 'profile'),
            ('file_id_scp', os.path.join(self.work_dir,
                                         'data/file_id_list.scp'), 'Paths',
             'file_id_list'),
            ('test_id_scp', os.path.join(self.work_dir,
                                         'data/test_id_list.scp'), 'Paths',
             'test_id_list'),
            ('GV_dir', os.path.join(self.work_dir,
                                    'data/GV'), 'Paths', 'GV_dir'),
            ('in_stepw_dir', os.path.join(self.work_dir, 'data/stepw'),
             'Paths', 'in_stepw_dir'),
            ('in_mgc_dir', os.path.join(self.work_dir,
                                        'data/mgc'), 'Paths', 'in_mgc_dir'),
            ('in_lf0_dir', os.path.join(self.work_dir,
                                        'data/lf0'), 'Paths', 'in_lf0_dir'),
            ('in_bap_dir', os.path.join(self.work_dir,
                                        'data/bap'), 'Paths', 'in_bap_dir'),
            ('in_sp_dir', os.path.join(self.work_dir,
                                       'data/sp'), 'Paths', 'in_sp_dir'),
            ('in_seglf0_dir', os.path.join(self.work_dir, 'data/lf03'),
             'Paths', 'in_seglf0_dir'),

            ## for glottHMM
            ('in_F0_dir', os.path.join(self.work_dir,
                                       'data/F0'), 'Paths', 'in_F0_dir'),
            ('in_Gain_dir', os.path.join(self.work_dir,
                                         'data/Gain'), 'Paths', 'in_Gain_dir'),
            ('in_HNR_dir', os.path.join(self.work_dir,
                                        'data/HNR'), 'Paths', 'in_HNR_dir'),
            ('in_LSF_dir', os.path.join(self.work_dir,
                                        'data/LSF'), 'Paths', 'in_LSF_dir'),
            ('in_LSFsource_dir', os.path.join(self.work_dir, 'data/LSFsource'),
             'Paths', 'in_LSFsource_dir'),

            ## for joint duration
            ('in_seq_dur_dir', os.path.join(self.work_dir, 'data/S2S_dur'),
             'Paths', 'in_seq_dur_dir'),
            ('in_dur_dir', os.path.join(self.work_dir,
                                        'data/dur'), 'Paths', 'in_dur_dir'),
            ('nn_norm_temp_dir',
             os.path.join(self.work_dir,
                          'data/step_hidden9'), 'Paths', 'nn_norm_temp_dir'),
            ('process_labels_in_work_dir', False, 'Labels',
             'process_labels_in_work_dir'),
            ('label_style', 'HTS', 'Labels', 'label_style'),
            ('label_type', 'state_align', 'Labels', 'label_type'),
            ('in_label_align_dir',
             os.path.join(self.work_dir,
                          'data/label_state_align'), 'Labels', 'label_align'),
            ('question_file_name',
             os.path.join(self.work_dir, 'data/questions.hed'), 'Labels',
             'question_file_name'),
            ('silence_pattern', ['*-#+*'], 'Labels', 'silence_pattern'),
            ('subphone_feats', 'full', 'Labels', 'subphone_feats'),
            ('xpath_file_name',
             os.path.join(self.work_dir, 'data/xml_labels/xpaths.txt'),
             'Labels', 'xpath_file_name'),
            ('label_config_file', 'configuration/examplelabelconfigfile.py',
             'Labels', 'label_config'),
            ('add_frame_features', True, 'Labels', 'add_frame_features'),
            ('fill_missing_values', False, 'Labels', 'fill_missing_values'),
            ('xpath_label_align_dir',
             os.path.join(self.work_dir, 'data/label_state_align'), 'Labels',
             'xpath_label_align'),
            ('enforce_silence', False, 'Labels', 'enforce_silence'),
            ('remove_silence_using_binary_labels', False, 'Labels',
             'remove_silence_using_binary_labels'),
            ('precompile_xpaths', True, 'Labels', 'precompile_xpaths'),
            ('iterate_over_frames', True, 'Labels', 'iterate_over_frames'),
            ('appended_input_dim', 0, 'Labels', 'appended_input_dim'),
            ('buffer_size', 200000, 'Data', 'buffer_size'),
            ('train_file_number', impossible_int, 'Data', 'train_file_number'),
            ('valid_file_number', impossible_int, 'Data', 'valid_file_number'),
            ('test_file_number', impossible_int, 'Data', 'test_file_number'),
            ('log_path', os.path.join(self.work_dir,
                                      'log'), 'Paths', 'log_path'),
            ('log_file', '', 'Paths', 'log_file'),
            ('log_config_file', 'configuration/exampleloggingconfigfile.conf',
             'Paths', 'log_config_file'),
            ('sptk_bindir', 'tools/bin/SPTK-3.9', 'Paths', 'sptk'),
            ('straight_bindir', 'tools/bin/straight', 'Paths', 'straight'),
            ('world_bindir', 'tools/bin/WORLD', 'Paths', 'world'),
            ('network_type', 'RNN', 'Architecture', 'network_type'),
            ('model_type', 'DNN', 'Architecture', 'model_type'),
            ('hidden_layer_type',
             ['TANH', 'TANH', 'TANH', 'TANH', 'TANH',
              'TANH'], 'Architecture', 'hidden_layer_type'),
            ('output_layer_type', 'LINEAR', 'Architecture',
             'output_layer_type'),
            ('sequential_training', False, 'Architecture',
             'sequential_training'),
            ('dropout_rate', 0.0, 'Architecture', 'dropout_rate'),

            ## some config variables for token projection DNN
            ('scheme', 'stagewise', 'Architecture', 'scheme'),
            ('index_to_project', 0, 'Architecture', 'index_to_project'),
            ('projection_insize', 10000, 'Architecture', 'projection_insize'),
            ('projection_outsize', 10, 'Architecture', 'projection_outsize'),
            ('initial_projection_distrib', 'gaussian', 'Architecture',
             'initial_projection_distrib'),
            ('projection_weights_output_dir', 'some_path', 'Architecture',
             'projection_weights_output_dir'),
            ('layers_with_projection_input', [0], 'Architecture',
             'layers_with_projection_input'),
            ('projection_learning_rate_scaling', 1.0, 'Architecture',
             'projection_learning_rate_scaling'),
            ('learning_rate', 0.0002, 'Architecture', 'learning_rate'),
            ('l2_reg', 0.00001, 'Architecture', 'L2_regularization'),
            ('l1_reg', 0.0, 'Architecture', 'L1_regularization'),
            ('batch_size', 16, 'Architecture', 'batch_size'),
            ('training_epochs', 25, 'Architecture', 'training_epochs'),
            ('hidden_activation', 'tanh', 'Architecture', 'hidden_activation'),
            ('output_activation', 'linear', 'Architecture',
             'output_activation'),
            ('hidden_layer_size', [1024, 1024, 1024, 1024, 1024,
                                   1024], 'Architecture', 'hidden_layer_size'),
            ('private_hidden_sizes', [1024], 'Architecture',
             'private_hidden_sizes'),
            ('stream_weights', [1.0], 'Architecture', 'stream_weights'),
            ('private_l2_reg', 0.00001, 'Architecture', 'private_l2_reg'),
            ('warmup_epoch', 5, 'Architecture', 'warmup_epoch'),
            ('warmup_momentum', 0.3, 'Architecture', 'warmup_momentum'),
            ('momentum', 0.9, 'Architecture', 'momentum'),
            ('warmup_epoch', 5, 'Architecture', 'warmup_epoch'),
            ('mdn_component', 1, 'Architecture', 'mdn_component'),
            ('var_floor', 0.01, 'Architecture', 'var_floor'),
            ('beta_opt', False, 'Architecture', 'beta_opt'),
            ('eff_sample_size', 0.8, 'Architecture', 'eff_sample_size'),
            ('mean_log_det', -100.0, 'Architecture', 'mean_log_det'),
            ('start_from_trained_model', '_', 'Architecture',
             'start_from_trained_model'),
            ('use_rprop', 0, 'Architecture', 'use_rprop'),
            ('mgc_dim', 60, 'Outputs', 'mgc'),
            ('dmgc_dim', 60 * 3, 'Outputs', 'dmgc'),
            ('vuv_dim', 1, 'Outputs', 'vuv'),
            ('lf0_dim', 1, 'Outputs', 'lf0'),
            ('dlf0_dim', 1 * 3, 'Outputs', 'dlf0'),
            ('bap_dim', 25, 'Outputs', 'bap'),
            ('dbap_dim', 25 * 3, 'Outputs', 'dbap'),
            ('cmp_dim', (60 * 3) + 1 + (1 * 3) + (25 * 3), 'Outputs', 'cmp'),
            ('stepw_dim', 55, 'Outputs', 'stepw_dim'),
            ('temp_sp_dim', 1025, 'Outputs', 'temp_sp_dim'),
            ('seglf0_dim', 7, 'Outputs', 'seglf0_dim'),
            ('delta_win', [-0.5, 0.0, 0.5], 'Outputs', 'delta_win'),
            ('acc_win', [1.0, -2.0, 1.0], 'Outputs', 'acc_win'),
            ('do_MLPG', True, 'Outputs', 'do_MLPG'),

            ## for GlottHMM
            ('F0_dim', 1, 'Outputs', 'F0'),
            ('dF0_dim', 1 * 3, 'Outputs', 'dF0'),
            ('Gain_dim', 1, 'Outputs', 'Gain'),
            ('dGain_dim', 1 * 3, 'Outputs', 'dGain'),
            ('HNR_dim', 5, 'Outputs', 'HNR'),
            ('dHNR_dim', 5 * 3, 'Outputs', 'dHNR'),
            ('LSF_dim', 30, 'Outputs', 'LSF'),
            ('dLSF_dim', 30 * 3, 'Outputs', 'dLSF'),
            ('LSFsource_dim', 10, 'Outputs', 'LSFsource'),
            ('dLSFsource_dim', 10 * 3, 'Outputs', 'dLSFsource'),

            ## for joint dur:-
            ('seq_dur_dim', 1, 'Outputs', 'seq_dur'),
            ('remove_silence_from_dur', True, 'Outputs',
             'remove_silence_from_dur'),
            ('dur_dim', 5, 'Outputs', 'dur'),
            ('dur_feature_type', 'numerical', 'Outputs', 'dur_feature_type'),
            ('output_feature_normalisation', 'MVN', 'Outputs',
             'output_feature_normalisation'),
            ('multistream_switch', False, 'Streams', 'multistream_switch'),
            #            ('use_private_hidden'  , False, 'Streams', 'use_private_hidden'),
            ('output_features', ['mgc', 'lf0', 'vuv',
                                 'bap'], 'Streams', 'output_features'),
            ('gen_wav_features', ['mgc', 'bap',
                                  'lf0'], 'Streams', 'gen_wav_features'),

            #            ('stream_mgc_hidden_size'   ,  192 , 'Streams', 'stream_mgc_hidden_size'),
            #            ('stream_lf0_hidden_size'   ,  32  , 'Streams', 'stream_lf0_hidden_size'),
            #            ('stream_vuv_hidden_size'   ,  32  , 'Streams', 'stream_vuv_hidden_size'),
            #            ('stream_bap_hidden_size'   ,  128 , 'Streams', 'stream_bap_hidden_size'),
            #            ('stream_stepw_hidden_size' ,  64  , 'Streams', 'stream_stepw_hidden_size'),
            #            ('stream_seglf0_hidden_size',  64  , 'Streams', 'stream_seglf0_hidden_size'),
            #            ('stream_cmp_hidden_size'   ,  256 , 'Streams', 'stream_cmp_hidden_size'),  #when multi-stream is disabled, use this to indicate the final hidden layer size
            #if this is also not provided, use the top common hidden layer size

            ## Glott HMM -- dummy values -- haven't used private streams:--
            #            ('stream_F0_hidden_size'   ,  192 , 'Streams', 'stream_F0_hidden_size'),
            #            ('stream_Gain_hidden_size'   ,  192 , 'Streams', 'stream_Gain_hidden_size'),
            #            ('stream_HNR_hidden_size'   ,  192 , 'Streams', 'stream_HNR_hidden_size'),
            #            ('stream_LSF_hidden_size'   ,  192 , 'Streams', 'stream_LSF_hidden_size'),
            #            ('stream_LSFsource_hidden_size'   ,  192 , 'Streams', 'stream_LSFsource_hidden_size'),

            ## joint dur -- dummy values -- haven't used private streams:--
            #            ('stream_dur_hidden_size'   ,  192 , 'Streams', 'stream_dur_hidden_size'),

            #            ('stream_sp_hidden_size'    , 1024, 'Streams', 'stream_sp_hidden_size'),

            #            ('stream_weight_mgc'   , 1.0, 'Streams', 'stream_weight_mgc'),
            #            ('stream_weight_lf0'   , 3.0, 'Streams', 'stream_weight_lf0'),
            #            ('stream_weight_vuv'   , 1.0, 'Streams', 'stream_weight_vuv'),
            #            ('stream_weight_bap'   , 1.0, 'Streams', 'stream_weight_bap'),
            #            ('stream_weight_stepw' , 0.0, 'Streams', 'stream_weight_stepw'),
            #            ('stream_weight_seglf0', 1.0, 'Streams', 'stream_weight_seglf0'),
            #            ('stream_weight_sp'    , 1.0, 'Streams', 'stream_weight_sp'),

            ## Glott HMM - unused?
            #            ('stream_weight_F0'   , 1.0, 'Streams', 'stream_weight_F0'),
            #            ('stream_weight_Gain'   , 1.0, 'Streams', 'stream_weight_Gain'),
            #            ('stream_weight_HNR'   , 1.0, 'Streams', 'stream_weight_HNR'),
            #            ('stream_weight_LSF'   , 1.0, 'Streams', 'stream_weight_LSF'),
            #            ('stream_weight_LSFsource'   , 1.0, 'Streams', 'stream_weight_LSFsource'),

            ## dur - unused?
            #            ('stream_weight_dur'   , 1.0, 'Streams', 'stream_weight_dur'),
            #            ('stream_lf0_lr'       , 0.5, 'Streams', 'stream_lf0_lr'),
            #            ('stream_vuv_lr'       , 0.5, 'Streams', 'stream_vuv_lr'),
            ('vocoder_type', 'STRAIGHT', 'Waveform', 'vocoder_type'),
            ('sr', 48000, 'Waveform', 'samplerate'),
            ('fl', 4096, 'Waveform', 'framelength'),
            ('shift', 1000 * 240 / 48000, 'Waveform', 'frameshift'),
            ('sp_dim', (4096 / 2) + 1, 'Waveform', 'sp_dim'),
            # fw_alpha: 'Bark' or 'ERB' allowing deduction of alpha, or explicity float value (e.g. 0.77)
            ('fw_alpha', 0.77, 'Waveform', 'fw_alpha'),
            ('pf_coef', 1.4, 'Waveform', 'postfilter_coef'),
            ('co_coef', 2047, 'Waveform', 'minimum_phase_order'),
            ('use_cep_ap', True, 'Waveform', 'use_cep_ap'),
            ('do_post_filtering', True, 'Waveform', 'do_post_filtering'),
            ('apply_GV', False, 'Waveform', 'apply_GV'),
            ('test_synth_dir', 'test_synthesis/wav', 'Waveform',
             'test_synth_dir'),
            ('DurationModel', False, 'Processes', 'DurationModel'),
            ('AcousticModel', False, 'Processes', 'AcousticModel'),
            ('GenTestList', False, 'Processes', 'GenTestList'),
            ('NORMLAB', False, 'Processes', 'NORMLAB'),
            ('MAKEDUR', False, 'Processes', 'MAKEDUR'),
            ('MAKECMP', False, 'Processes', 'MAKECMP'),
            ('NORMCMP', False, 'Processes', 'NORMCMP'),
            ('TRAINDNN', False, 'Processes', 'TRAINDNN'),
            ('DNNGEN', False, 'Processes', 'DNNGEN'),
            ('GENWAV', False, 'Processes', 'GENWAV'),
            ('CALMCD', False, 'Processes', 'CALMCD'),
            ('NORMSTEP', False, 'Processes', 'NORMSTEP'),
            ('GENBNFEA', False, 'Processes', 'GENBNFEA'),
            ('mgc_ext', '.mgc', 'Extensions', 'mgc_ext'),
            ('bap_ext', '.bap', 'Extensions', 'bap_ext'),
            ('lf0_ext', '.lf0', 'Extensions', 'lf0_ext'),
            ('cmp_ext', '.cmp', 'Extensions', 'cmp_ext'),
            ('lab_ext', '.lab', 'Extensions', 'lab_ext'),
            ('utt_ext', '.utt', 'Extensions', 'utt_ext'),
            ('stepw_ext', '.stepw', 'Extensions', 'stepw_ext'),
            ('sp_ext', '.sp', 'Extensions', 'sp_ext'),

            ## GlottHMM
            ('F0_ext', '.F0', 'Extensions', 'F0_ext'),
            ('Gain_ext', '.Gain', 'Extensions', 'Gain_ext'),
            ('HNR_ext', '.HNR', 'Extensions', 'HNR_ext'),
            ('LSF_ext', '.LSF', 'Extensions', 'LSF_ext'),
            ('LSFsource_ext', '.LSFsource', 'Extensions', 'LSFsource_ext'),

            ## joint dur
            ('dur_ext', '.dur', 'Extensions', 'dur_ext'),
        ]

        # this uses exec(...) which is potentially dangerous since arbitrary code could be executed
        for (variable, default, section, option) in user_options:
            value = None

            try:
                # first, look for a user-set value for this variable in the config file
                value = configparser.get(section, option)
                user_or_default = 'user'

            except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
                # use default value, if there is one
                if (default == None) or \
                   (default == '')   or \
                   ((type(default) == int) and (default == impossible_int)) or \
                   ((type(default) == float) and (default == impossible_float))  :
                    logger.critical('%20s has no value!' %
                                    (section + ":" + option))
                    raise Exception
                else:
                    value = default
                    user_or_default = 'default'

            if type(default) == str:
                exec('self.%s = "%s"' % (variable, value))
            elif type(default) == int:
                exec('self.%s = int(%s)' % (variable, value))
            elif type(default) == float:
                exec('self.%s = float(%s)' % (variable, value))
            elif type(default) == bool:
                exec('self.%s = bool(%s)' % (variable, value))
            elif type(default) == list:
                exec('self.%s = list(%s)' % (variable, value))
            else:
                logger.critical(
                    'Variable %s has default value of unsupported type %s',
                    variable, type(default))
                raise Exception(
                    'Internal error in configuration settings: unsupported default type'
                )

            logger.info('%20s has %7s value %s' %
                        (section + ":" + option, user_or_default, value))

        self.combined_feature_name = ''
        for feature_name in self.output_features:
            self.combined_feature_name += '_'
            self.combined_feature_name += feature_name

        self.combined_model_name = self.model_type
        for hidden_type in self.hidden_layer_type:
            self.combined_model_name += '_' + hidden_type

        self.combined_model_name += '_' + self.output_layer_type
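# A hypothetical sketch of a user configuration file for user_configuration();
# section/option names follow the user_options table above, the values are made
# up, and anything left unset falls back to the defaults in that table.
SAMPLE_USER_CFG = """
[Paths]
work = /path/to/work_dir

[Data]
train_file_number = 1000
valid_file_number = 66
test_file_number = 66

[Architecture]
hidden_layer_size = [1024, 1024, 1024]
learning_rate = 0.002

[Processes]
TRAINDNN = True
GENWAV = True
"""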
Example #54
0
#############COMMAND CODE IS BELOW ######################
import argparse
import ast
from ConfigParser import ConfigParser

###########OPTIONS
parser = argparse.ArgumentParser(description='Command line parser of skim options')
parser.add_argument('--config',   dest='cfgfile',   help='Name of config file',   required = True)
parser.add_argument('--sample',   dest='samplename',   help='Type of sample: 0 signal, 1 bkgmodel',   required = True)
parser.add_argument('--casename', dest='casename',  help='Case name',   required = True)
args = parser.parse_args()
configfilename = args.cfgfile
sampletorun    = args.samplename
case           = args.casename

###########Read Config file
print "[INFO] Reading skim configuration file . . ."
cfgparser = ConfigParser()
cfgparser.read('%s'%configfilename)
##########Get skim variables
print "[INFO] Getting configuration parameters . . ."
directory   = ast.literal_eval(cfgparser.get("configuration","directory"))
print "    -The directory:"
print "      *",directory
signalsamples  = ast.literal_eval(cfgparser.get("configuration","mvasigsamples"))
print "    -The list of signal samples:"
for x in range(len(signalsamples)):
  print "      *",signalsamples[x]
bdtbkgsamples  = ast.literal_eval(cfgparser.get("configuration","mvabkgsamples"))
print "    -The list of bdtbkg samples:"
print "      *",bdtbkgsamples[0] 
tag         = ast.literal_eval(cfgparser.get("configuration","tag"))
print "    -The tag:"
Example #55
0
def get_content(toUrl,count):
    """ Return the content of given url

        Args:
            toUrl: aim url
            count: index of this connect

        Return:
            content if success
            'Fail' if fail
    """

    cf = ConfigParser.ConfigParser()
    cf.read("config.ini")
    cookie = cf.get("cookie", "cookie")

    headers = {
        'Cookie': cookie,
        'Host':'www.zhihu.com',
        'Referer':'http://www.zhihu.com/',
        'User-Agent':'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36',
        # 'User-Agent':'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.103 Safari/537.36',
        'Accept-Encoding':'gzip'
    }

    req = urllib2.Request(
        url = toUrl,
        headers = headers
    )

    proxy_all = [
    "10.25.170.247:5678",
    "10.25.171.82:5678",
    "10.47.114.111:5678",
    "10.47.54.77:5678",
    "10.25.60.218:5678",
    "10.47.54.180:5678",
    "10.47.54.115:5678",
    "10.47.106.138:5678"
    ]
    current_proxy = random.choice(proxy_all)
    try:
        opener = urllib2.build_opener(urllib2.ProxyHandler({"http" : current_proxy}))  #urllib2.ProxyHandler()
        urllib2.install_opener(opener)
        page = urllib2.urlopen(req,timeout = 15)
        headers = page.info()
        content = page.read()
    # except Exception,e:
    #     if count % 1 == 0:
    #         print str(count) + ", Error: " + str(e) + " URL: " + toUrl
    #     return "FAIL"
    except urllib2.HTTPError, e:
        if e.code == 404:
            if count % 1 == 0:
                print str(count) + ", Error: " + str(e) + " URL: " + toUrl
            return "NO FOUND"
        else:
            try:
                page = urlopen_with_retry(req, proxy_all)
                headers = page.info()
                content = page.read()
            except Exception, e:
                if count % 1 == 0:
                    print str(count) + ", Error: " + str(e) + " URL: " + toUrl + "retry_fail"
                return "FAIL"
Example #56
0
def parse(filename):
    # Set up defaults
    defaults = {
        'pixel_size_x': 1,
        'pixel_size_y': 1,
        'get_coordinates': True,
        'draw_aspect_ratio': True,
        'save_intermediates': False,
        'disk_size_opening': 10,
        'contrast_adjustment': 3,
        'threshold_adjustment': 0,
        'disk_size_smoothing': 20,
        'downsample': True,
        'num_points': 100,
        'run3dmorph': False
    }

    required_list = ['in_directory', 'input_ext', 'out_directory', 'sampleID']
    boolean_list = [
        'get_coordinates', 'draw_aspect_ratio', 'save_intermediates',
        'downsample', 'run3dmorph'
    ]
    float_list = [
        'pixel_size_x', 'pixel_size_y', 'disk_size_opening',
        'contrast_adjustment', 'threshold_adjustment', 'disk_size_smoothing',
        'num_points'
    ]

    # Parse setting
    settings = defaults

    settings['timestamp'] = datetime.now().strftime('%Y-%m-%d at %H:%M:%S')

    parser = ConfigParser.SafeConfigParser(allow_no_value=True)
    parser.optionxform = str  # Preserve case

    if os.path.isfile(filename):
        try:
            parser.read(filename)
        except ConfigParser.MissingSectionHeaderError:
            vfile = StringIO(u'[morph2d]\n%s' % open(filename).read())
            parser.readfp(vfile)
    else:
        sys.exit("Error: Cannot open settings file " + filename)

    # Set optional variables
    for setting in parser.options('morph2d'):
        if setting in boolean_list:
            try:
                settings[setting] = parser.getboolean('morph2d', setting)
            except:
                pass
        elif setting in float_list:
            try:
                settings[setting] = parser.getfloat('morph2d', setting)
            except:
                pass
        else:
            settings[setting] = str(parser.get('morph2d', setting))

    # Check for required parameters
    for required in required_list:
        if required not in settings.keys():
            sys.exit('Error: ' + required + ' must be set in settings file.')

    # Set default output directory if none specified
    if not settings['out_directory']:
        settings['out_directory'] = settings['in_directory'] + os.sep + 'morph2d'

    # Set up additional global settings
    if settings['in_directory'].endswith(os.sep):
        settings['in_directory'] = settings['in_directory'].rstrip(os.sep)

    return settings
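# A hypothetical sketch of a morph2d settings file. Because of the
# MissingSectionHeaderError fallback above, the [morph2d] header itself may be
# omitted; the required keys come from required_list, paths are placeholders.
SAMPLE_SETTINGS = """
in_directory = /data/images
input_ext = tif
out_directory =
sampleID = sample01
pixel_size_x = 0.5
pixel_size_y = 0.5
downsample = true
"""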
Example #57
0
def gather_tests(include=[], exclude=[], benchmarker=None):
    '''
    Given test names as strings, returns a list of FrameworkTest objects.
    For example, 'aspnet-mysql-raw' turns into a FrameworkTest object with
    variables for checking the test directory, the test database OS, and
    other useful items.

    With no arguments, every test in this framework will be returned.
    With include, only tests with this exact name will be returned.
    With exclude, all tests but those excluded will be returned.

    A benchmarker is needed to construct full FrameworkTest objects. If
    one is not provided, a default Benchmarker will be created.
    '''

    # Avoid setting up a circular import
    from benchmark import framework_test
    from benchmark.benchmarker import Benchmarker
    from setup.linux import setup_util

    # Help callers out a bit
    if include is None:
        include = []
    if exclude is None:
        exclude = []

    # Old, hacky method to exclude all tests was to
    # request a test known to not exist, such as ''.
    # If test '' was requested, short-circuit and return
    # nothing immediately
    if len(include) == 1 and '' in include:
        return []

    # Setup default Benchmarker using example configuration
    if benchmarker is None:
        print "Creating Benchmarker from benchmark.cfg.example"
        default_config = setup_util.get_fwroot() + "/benchmark.cfg.example"
        config = ConfigParser.SafeConfigParser()
        config.readfp(open(default_config))
        defaults = dict(config.items("Defaults"))

        # Convert strings into proper python types
        for k, v in defaults.iteritems():
            try:
                defaults[k] = literal_eval(v)
            except Exception:
                pass

        # Ensure we only run the __init__ method of Benchmarker
        defaults['install'] = None

        benchmarker = Benchmarker(defaults)

    # Search in both old and new directories
    fwroot = setup_util.get_fwroot()
    config_files = glob.glob("%s/*/benchmark_config.json" % fwroot)
    config_files.extend(
        glob.glob("%s/frameworks/*/*/benchmark_config.json" % fwroot))

    tests = []
    for config_file_name in config_files:
        config = None
        with open(config_file_name, 'r') as config_file:
            try:
                config = json.load(config_file)
            except ValueError:
                # User-friendly errors
                print("Error loading '%s'." % config_file_name)
                raise

        # Find all tests in the config file
        config_tests = framework_test.parse_config(
            config, os.path.dirname(config_file_name), benchmarker)

        # Filter
        for test in config_tests:
            if len(include) == 0 and len(exclude) == 0:
                # No filters, we are running everything
                tests.append(test)
            elif test.name in exclude:
                continue
            elif test.name in include:
                tests.append(test)
            else:
                # An include list exists, but this test is
                # not listed there, so we ignore it
                pass

    # Ensure we were able to locate everything that was
    # explicitly included
    if 0 != len(include):
        names = {test.name for test in tests}
        if 0 != len(set(include) - set(names)):
            missing = list(set(include) - set(names))
            raise Exception("Unable to locate tests %s" % missing)

    tests.sort(key=lambda x: x.name)
    return tests
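# Hypothetical usage of gather_tests(), matching the include/exclude semantics
# described in its docstring (left commented out because the calls need a
# framework checkout and a benchmark.cfg.example on disk):
#
#   all_tests = gather_tests()
#   just_one = gather_tests(include=['aspnet-mysql-raw'])
#   all_but = gather_tests(exclude=['aspnet-mysql-raw'])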
Example #58
0
import subprocess
import ConfigParser

# Read conf file
config = ConfigParser.RawConfigParser()
configFilePath = 'config.conf'
goldConfig = '../goldImage/goldconfig.conf'
config.read(configFilePath)

lang = config.get('Section', 'lang')
dataset = config.get('Section', 'dataset')
script = config.get('Section', 'script')
package = config.get('Section', 'package')
packagelist = package.split()

config.read(goldConfig)
goldpackage = config.get('Section', 'package')
goldpackagelist = goldpackage.split()

delete = []
install = []

for golditem in goldpackagelist:
    if golditem not in packagelist:
        delete.append(golditem)
strdelete = ' '.join(delete)

for deliveritem in packagelist:
    if deliveritem not in goldpackagelist:
        install.append(deliveritem)
strinstall = ' '.join(install)
Example #59
0
def read_and_set_configuration(config_path):
    """Reads the worker configuration from the file at config_path and sets the read configuration to
    the env variable.

    The configuration is read of the path, put into a dictionary which will be serialized and set in the env
    variable.

    Notes:
        The WORKER_VERSION has to be set manually for now.
        The COMPONENT has to be set manually at the entry point of each component (worker/sandbox).

    Args:
        config_path: string, the configuration file path.
    """
    clear_config()

    # init and set default values for optional configuration keys
    config = ConfigParser.SafeConfigParser({
        DEBUG_TRACES:
        DEFAULT_DEBUG_TRACES,
        BYPASS_CERTIFICATE_VERIFICATION:
        DEFAUTL_BYPASS_CERTIFICATE_VERIFICATION,
        ENFORCE_RUNBOOK_SIGNATURE_VALIDATION:
        DEFAULT_ENFORCE_RUNBOOK_SIGNATURE_VALIDATION,
        GPG_PUBLIC_KEYRING_PATH:
        DEFAULT_GPG_PUBLIC_KEYRING_PATH,
        STATE_DIRECTORY_PATH:
        DEFAULT_STATE_DIRECTORY_PATH,
        JRDS_POLLING_FREQUENCY:
        DEFAULT_JRDS_POLLING_FREQUENCY,
        PROXY_CONFIGURATION_PATH:
        DEFAULT_PROXY_CONFIGURATION_PATH
    })

    # load the worker configuration file
    config.read(config_path)

    # create the configuration dictionary
    # read required configuration values
    configuration = {
        CERT_PATH:
        os.path.abspath(config.get(WORKER_REQUIRED_CONFIG_SECTION, CERT_PATH)),
        KEY_PATH:
        os.path.abspath(config.get(WORKER_REQUIRED_CONFIG_SECTION, KEY_PATH)),
        BASE_URI:
        config.get(WORKER_REQUIRED_CONFIG_SECTION, BASE_URI),
        ACCOUNT_ID:
        config.get(WORKER_REQUIRED_CONFIG_SECTION, ACCOUNT_ID),
        MACHINE_ID:
        config.get(WORKER_REQUIRED_CONFIG_SECTION, MACHINE_ID),
        HYBRID_WORKER_GROUP_NAME:
        config.get(WORKER_REQUIRED_CONFIG_SECTION, HYBRID_WORKER_GROUP_NAME),
        WORKING_DIRECTORY_PATH:
        os.path.abspath(
            config.get(WORKER_REQUIRED_CONFIG_SECTION,
                       WORKING_DIRECTORY_PATH)),
        SOURCE_DIRECTORY_PATH:
        os.path.dirname(os.path.realpath(__file__)),
        WORKER_VERSION:
        DEFAULT_WORKER_VERSION,
        COMPONENT:
        DEFAULT_COMPONENT
    }

    # read optional configuration section
    configuration.update({
        DEBUG_TRACES:
        config.getboolean(WORKER_OPTIONAL_CONFIG_SECTION, DEBUG_TRACES),
        BYPASS_CERTIFICATE_VERIFICATION:
        config.getboolean(WORKER_OPTIONAL_CONFIG_SECTION,
                          BYPASS_CERTIFICATE_VERIFICATION),
        ENFORCE_RUNBOOK_SIGNATURE_VALIDATION:
        config.getboolean(WORKER_OPTIONAL_CONFIG_SECTION,
                          ENFORCE_RUNBOOK_SIGNATURE_VALIDATION),
        GPG_PUBLIC_KEYRING_PATH:
        config.get(WORKER_OPTIONAL_CONFIG_SECTION, GPG_PUBLIC_KEYRING_PATH),
        STATE_DIRECTORY_PATH:
        config.get(WORKER_OPTIONAL_CONFIG_SECTION, STATE_DIRECTORY_PATH),
        JRDS_POLLING_FREQUENCY:
        config.getint(WORKER_OPTIONAL_CONFIG_SECTION, JRDS_POLLING_FREQUENCY),
        PROXY_CONFIGURATION_PATH:
        config.get(WORKER_OPTIONAL_CONFIG_SECTION, PROXY_CONFIGURATION_PATH)
    })

    # set the worker conf to env var
    set_config(configuration)
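# A purely illustrative sketch of the worker configuration file consumed above.
# The real section and key strings live in constants not shown in this excerpt
# (WORKER_REQUIRED_CONFIG_SECTION, CERT_PATH, ...), so every name and value
# below is hypothetical.
SAMPLE_WORKER_CONF = """
[worker-required]
cert_path = /var/opt/worker/worker.crt
key_path = /var/opt/worker/worker.key
base_uri = https://example.agentsvc.azure-automation.net
account_id = 00000000-0000-0000-0000-000000000000
machine_id = 11111111-1111-1111-1111-111111111111
hybrid_worker_group_name = my-worker-group
working_directory_path = /var/opt/worker/run

[worker-optional]
debug_traces = false
jrds_polling_frequency = 30
"""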
Example #60
0
def main():
    # Load cluster.conf
    parser = ConfigParser.SafeConfigParser()
    parser.read(CLUSTER_CONF)

    # Get default log directory
    log_directory = parser.get("cluster", "log_directory")

    parser = argparse.ArgumentParser(
        description="coordinates the execution of Themis jobs")
    parser.add_argument("themis_binary", help="path to the Themis binary")
    parser.add_argument("config", help="a YAML file giving configuration "
                        "options for Themis")
    parser.add_argument("--log_directory", "-l",
                        help="the directory in which to store coordinator logs "
                        "(default: %(default)s)", default=log_directory)
    parser.add_argument("--keepalive_refresh", help="the length of time node "
                        "coordinators should wait between refreshing keepalive "
                        "information (default: %(default)s seconds)", type=int,
                        default=2)
    parser.add_argument("--keepalive_timeout", help="the amount of time that "
                        "must pass without receiving a keepalive message from "
                        "a node coordinator before the cluster coordinator "
                        "considers that node to be dead (default: %(default)s "
                        "seconds)", type=int, default=10)
    parser.add_argument("--profiler", help="path to the binary of a profiling"
                        "tool to use, for example valgrind or operf")
    parser.add_argument("--profiler_options", help="options surrounded by "
                        "quotes to pass to the profiler")
    parser.add_argument("--ld_preload", help="Path to a library to be "
                        "preloaded using LD_PRELOAD.")

    utils.add_redis_params(parser)
    utils.add_interfaces_params(parser)

    args = parser.parse_args()

    args.config = os.path.abspath(args.config)

    args.log_directory = create_log_directory(args.log_directory)
    log.info("Logging to %s" % (args.log_directory))

    job_status_gui = None
    job_status_gui_out_fp = None

    resource_monitor_gui = None
    resource_monitor_gui_out_fp = None

    coordinator = None

    try:
        # To make the status GUI port distinct for each user but deterministic
        # for a single user, use 2000 + (the md5 hash of the user's username
        # mod 1000) as the web GUI's port number
        username_md5sum = hashlib.md5()
        username_md5sum.update(getpass.getuser())

        job_status_gui_port = (
            (int(username_md5sum.hexdigest(), 16) % 1000 + 2000) / 10) * 10
        resource_monitor_gui_port = (
            (int(username_md5sum.hexdigest(), 16) % 1000 + 3200) / 10) * 10


        print ""

        # Start the resource monitor web GUI
        resource_monitor_gui, resource_monitor_gui_out_fp = \
            start_resource_monitor_gui(args, resource_monitor_gui_port)

        # Start the job status web GUI
        job_status_gui, job_status_gui_out_fp = start_job_status_gui(
            args, job_status_gui_port)

        print ""

        coordinator = ClusterCoordinator(**vars(args))
        coordinator.run()
    finally:
        if job_status_gui is not None:
            log.info("Stopping job status GUI (PID %d)" % (job_status_gui.pid))
            os.killpg(job_status_gui.pid, signal.SIGTERM)
            job_status_gui.wait()


        if job_status_gui_out_fp is not None:
            job_status_gui_out_fp.flush()
            job_status_gui_out_fp.close()

        if resource_monitor_gui is not None:
            log.info("Stopping resource monitor GUI (PID %d)" % (
                    resource_monitor_gui.pid))
            os.killpg(resource_monitor_gui.pid, signal.SIGTERM)
            resource_monitor_gui.wait()

        if resource_monitor_gui_out_fp is not None:
            resource_monitor_gui_out_fp.flush()
            resource_monitor_gui_out_fp.close()

        if coordinator is not None:
            log.info("Stopping node coordinators")
            coordinator.stop_node_coordinators()