def main(self):
    parser = SafeConfigParser()
    try:
        parser.read(self.base_folder + '/' + self.CONFIG_FILENAME)
        self.server = parser.get(self.CONFIG_SECTION, 'server')
        self.port = parser.getint(self.CONFIG_SECTION, 'port')
        self.user = parser.get(self.CONFIG_SECTION, 'user')
        self.password = parser.get(self.CONFIG_SECTION, 'password')
        self.timeout = parser.getint(self.CONFIG_SECTION, 'timeout')
        rpc_interval = parser.getint(self.CONFIG_SECTION, 'rpc_interval')
    except Exception as e:
        print(e)
        self.displayErrorAndExit(
            "Could not read configuration '" + self.CONFIG_FILENAME +
            "'\nError is: " + str(e))
    if parser.has_option(self.CONFIG_SECTION, "rpc_logger_level"):
        transmissionrpc.utils.add_stdout_logger(
            parser.get(self.CONFIG_SECTION, 'rpc_logger_level'))
    self.check_alt_speed_enabled()
    # The callback function is called repeatedly until it returns False,
    # at which point the timeout is automatically destroyed and the function
    # will not be called again.
    # http://www.pygtk.org/pygtk2reference/gobject-functions.html#function-gobject--timeout-add
    gtk.timeout_add(rpc_interval * 1000, self.check_alt_speed_enabled)
    gtk.main()
class Bench:
    def __init__(self):
        self.config = SafeConfigParser()
        self.config.readfp(open('./bench.conf', 'rb'))
        self.host = self.config.get("server", "address")
        self.port = self.config.getint("server", "port")
        self.max_clients = self.config.getint("bench", "clients")

    def createSocket(self):
        sock = XMLSocket(socket.AF_INET, socket.SOCK_STREAM)
        sock.connect((self.host, self.port))
        return sock

    def run(self):
        self.clients = []
        for ii in range(self.max_clients):
            self.clients.append(Client(ii, (self.host, self.port)))
        print "Clients queue has been initialized"
        print "Starting %d clients..." % self.max_clients
        for c in self.clients:
            c.start()
        print "All clients are started"
        print "Waiting for 30 sec"
        time.sleep(30)
        for c in self.clients:
            c.running = 0
        for c in self.clients:
            c.join()
def parse_config():
    '''parse_config() -> (toth, inicio, timestamp)

    toth -> int
    inicio -> int
    timestamp -> datetime.date

    Parses the file at CONFIG and returns the total number of available
    hours, the ISO weekday on which counting starts, and the date of the
    last hours credit, besides instantiating the activities.'''
    parser = SafeConfigParser()
    try:
        parser.readfp(codecs.open(CONFIG, 'r', ENCODING))
    except IOError:
        raise ArquivoError('No configuration file found.')
    try:
        toth = parser.getint(HEADER, 'disponivel')
        inicio = parser.getint(HEADER, 'inicio')
        timestamp = parser.getint(HEADER, 'timestamp')
        if timestamp:
            # timestamp is stored as an integer in YYYYMMDD form
            ano, timestamp = divmod(timestamp, 10000)
            mes, dia = divmod(timestamp, 100)
            timestamp = datetime.date(ano, mes, dia)
        for a in parser.sections():
            if a != HEADER:
                kwargs = {'nome': a,
                          'pts': parser.getfloat(a, 'pts'),
                          'saldo': parser.getfloat(a, 'saldo')}
                Atividade(**kwargs)
    except (TypeError, NoSectionError):
        raise ArquivoError('Corrupted configuration file.')
    return (toth, inicio, timestamp)
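For reference, an INI layout consistent with the lookups above might look like the sketch below; the header section name depends on the HEADER constant (assumed here to be geral), the activity section and all values are illustrative, and timestamp uses the YYYYMMDD integer encoding that the divmod calls decode.

[geral]
disponivel = 40
inicio = 1
timestamp = 20140315

[Leitura]
pts = 2.5
saldo = 0.0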
class MilightConfig():
    def __init__(self):
        app_path = os.path.dirname(os.path.realpath(__file__))
        self.configfile = app_path + "/config.ini"
        self.config = SafeConfigParser()
        self.config.read(self.configfile)
        self.milight_hostname = self.config.get('MILIGHT', 'hostname')
        self.milight_port = self.config.getint('MILIGHT', 'port')
        # interval between pixels
        self.pixel_interval = self.config.getint('CPU_OPTMIZATION', 'pixel_interval')
        # time interval
        self.time_interval = self.config.getfloat('CPU_OPTMIZATION', 'time_interval')
        # show color
        self.debug = self.config.getboolean('CPU_OPTMIZATION', 'debug')

    def save_config(self):
        self.config = SafeConfigParser()
        self.config.add_section('MILIGHT')
        self.config.set('MILIGHT', 'hostname', self.milight_hostname)
        self.config.set('MILIGHT', 'port', str(int(self.milight_port)))
        self.config.add_section('CPU_OPTMIZATION')
        self.config.set('CPU_OPTMIZATION', 'pixel_interval', str(int(self.pixel_interval)))
        self.config.set('CPU_OPTMIZATION', 'time_interval', str(self.time_interval))
        self.config.set('CPU_OPTMIZATION', 'debug', str(self.debug))
        with open(self.configfile, 'wb') as configfile:
            self.config.write(configfile)
def get(name, rc_file='~/.oerplibrc'):
    """Return the session configuration identified by `name`
    from the `rc_file` file.

    >>> import oerplib
    >>> oerplib.tools.session.get('foo')
    {'protocol': 'xmlrpc', 'user': '******', 'timeout': 120, 'database': 'db_name', 'passwd': 'admin', 'type': 'OERP', 'port': 8069, 'server': 'localhost'}

    :raise: :class:`oerplib.error.Error`
    """
    conf = SafeConfigParser()
    conf.read([os.path.expanduser(rc_file)])
    if not conf.has_section(name):
        raise error.Error(
            "'{0}' session does not exist".format(name))
    return {
        'type': conf.get(name, 'type'),
        'server': conf.get(name, 'server'),
        'protocol': conf.get(name, 'protocol'),
        'port': conf.getint(name, 'port'),
        'timeout': conf.getint(name, 'timeout'),
        'user': conf.get(name, 'user'),
        'passwd': conf.get(name, 'passwd'),
        'database': conf.get(name, 'database'),
    }
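A matching entry in ~/.oerplibrc would be one section per session name, with the option names taken from the lookups above; the values below simply mirror the doctest output, and the user name is a placeholder.

[foo]
type = OERP
server = localhost
protocol = xmlrpc
port = 8069
timeout = 120
user = admin
passwd = admin
database = db_name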
def __init__(self, config_path):
    parser = SafeConfigParser()
    parser.read(config_path)
    # loading kancolle browser name
    self.browser = parser.get('system', 'KANCOLLE_BROWSER')
    # loading sleep time for each round (second)
    self.sleep_time = parser.getint('system', 'WAIT_TIME_SECOND')
    # loading docker number for repairing
    self.docker_num = parser.getint('fleet', 'BATHROOM_NUM')
    # loading enable setting
    self.fight_enabled = parser.getboolean('enable', 'fight')
    self.dismantling_enabled = parser.getboolean('enable', 'dismantling')
    self.quest_enabled = parser.getboolean('enable', 'quest')
    self.expedition_enabled = parser.getboolean('enable', 'expedition')
    self.docking_enabled = parser.getboolean('enable', 'docking')
    # loading fight fleet
    self.fight_fleets = []
    for fleet_num, fight_world in self.__get_section_dict(parser, "fight").items():
        self.fight_fleets.append(Fleet(int(fleet_num)))
    # loading expedition fleet and expedition number
    self.expedition_fleets = []
    self.expeditions = []
    for fleet_num, expedition_num in self.__get_section_dict(parser, "expedition").items():
        self.expedition_fleets.append(Fleet(int(fleet_num)))
        self.expeditions.append(Expedition(int(expedition_num)))
    # loading quests
    self.quests_list = []
    for type, ids_raw_str in self.__get_section_dict(parser, "quests").items():
        id_list = [id.strip() for id in ids_raw_str.split(',')]
        self.quests_list.append(Quests(type, id_list))
def refresh_config(self):
    parser = SafeConfigParser()
    parser.read("server.cfg")
    self.udp_port = parser.getint("heartbeat", "udp_port")
    self.check_period = parser.getint("heartbeat", "check_period")
    self.check_timeout = parser.getint("heartbeat", "check_timeout")
    self.server_ip = parser.get("heartbeat", "server_ip")
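An illustrative server.cfg fragment satisfying these four lookups; all values are made up.

[heartbeat]
udp_port = 9999
check_period = 5
check_timeout = 15
server_ip = 192.168.1.10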
def config():
    global settings
    config = SafeConfigParser()
    config.read(CONFIG_FILE)
    settings['main'] = {}
    settings['main']['update_interval'] = config.getint('main', 'update_interval')
    settings['servos'] = {}
    servo_settings = {'tilt_gpio': int, 'pan_gpio': int, 'servo_frequency': float,
                      'low_width': float, 'high_width': float,
                      'pan_left_limit': float, 'pan_right_limit': float,
                      'tilt_top_limit': float, 'tilt_bottom_limit': float,
                      'min_servo_speed': float, 'max_servo_speed': float}
    for (key, key_type) in servo_settings.items():
        if key_type == int:
            settings['servos'][key] = config.getint('servos', key)
        elif key_type == float:
            settings['servos'][key] = config.getfloat('servos', key)
        else:
            print "Unknown key type: " + str(key_type)
    settings['lasers'] = {}
    settings['lasers']['laser1_gpio'] = config.getint('lasers', 'laser1_gpio')
    settings['lasers']['laser2_gpio'] = config.getint('lasers', 'laser2_gpio')
    config_servos()
def read_sequence(conf_fname, smooth=False):
    """
    Read sequence-wide parameters, such as unchanging particle properties
    and frame range. Values are stored in an INI-format file.

    Arguments:
    conf_fname - name of the config file
    smooth - whether the sequence should use tracers trajectory-smoothing.

    Returns:
    a Sequence object initialized with the configuration values found.
    """
    parser = SafeConfigParser()
    parser.read(conf_fname)

    particle = Particle(
        parser.getfloat("Particle", "diameter"),
        parser.getfloat("Particle", "density"))

    frate = parser.getfloat("Scene", "frame rate")
    tracer_tmpl = parser.get("Scene", "tracers file")
    part_tmpl = parser.get("Scene", "particles file")
    frange = (parser.getint("Scene", "first frame"),
              parser.getint("Scene", "last frame") + 1)

    return Sequence(frange, frate, particle, part_tmpl, tracer_tmpl, smooth)
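A minimal config sketch for read_sequence (and read_dual_scene below), covering only the options actually read; all values are illustrative. Note that option names may contain spaces, which ConfigParser allows.

[Particle]
diameter = 0.0005
density = 1050.0

[Scene]
frame rate = 500.0
tracers file = tracers_%d.h5
particles file = particles_%d.h5
first frame = 1
last frame = 100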
def init_conf_info(self):
    """ """
    parser = SafeConfigParser()
    parser.read('./conf/NovelClusterModule.conf')
    self.start_site_id = parser.getint('cluster_node_module', 'proc_start_site_id')
    self.end_site_id = parser.getint('cluster_node_module', 'proc_end_site_id')
def read_dual_scene(conf_fname):
    """
    Read dual-scene parameters, such as unchanging particle properties and
    frame range. Values are stored in an INI-format file.

    Arguments:
    conf_fname - name of the config file

    Returns:
    a DualScene object initialized with the configuration values found.
    """
    parser = SafeConfigParser()
    parser.read(conf_fname)

    particle = Particle(
        parser.getfloat("Particle", "diameter"),
        parser.getfloat("Particle", "density"))

    frate = parser.getfloat("Scene", "frame rate")
    tracer_file = parser.get("Scene", "tracers file")
    part_file = parser.get("Scene", "particles file")
    frange = (parser.getint("Scene", "first frame"),
              parser.getint("Scene", "last frame") + 1)

    return DualScene(tracer_file, part_file, frate, particle, frange)
def readConfig(self, config):
    try:
        parser = SafeConfigParser()
        parser.read(config)
        self.JSON_FILE = parser.get('main', 'JSON_FILE')
        self.CACTI_LOGFILE = parser.get('main', 'CACTI_LOGFILE')
        self.PIDFILE = parser.get('main', 'PIDFILE')
        self.NOTIFICATION_DELAY = parser.getint('main', 'NOTIFICATION_DELAY')
        self.NOTIFICATION_WINDOW = parser.getint('main', 'NOTIFICATION_WINDOW')
        self.TIME_BEFORE_RESET = parser.getint('main', 'TIME_BEFORE_RESET')
        self.NOTIFICATION_METHOD = parser.getint('main', 'NOTIFICATION_METHOD')
        self.SUMMARY = parser.get('main', 'SUMMARY')
        self.ITSM_URL = parser.get('itsm', 'ITSM_URL')
        self.ITSM_LOCATION = parser.get('itsm', 'ITSM_LOCATION')
        self.ITSM_USERNAME = parser.get('itsm', 'ITSM_USERNAME')
        self.ITSM_PASSWORD = parser.get('itsm', 'ITSM_PASSWORD')
        self.ASSIGNED_GROUP = parser.get('itsm', 'ASSIGNED_GROUP')
        self.ASSIGNED_SUPPORT_COMPANY = parser.get('itsm', 'ASSIGNED_SUPPORT_COMPANY')
        self.FIRST_NAME = parser.get('itsm', 'FIRST_NAME')
        self.IMPACT = parser.get('itsm', 'IMPACT')
        self.LAST_NAME = parser.get('itsm', 'LAST_NAME')
        self.REPORTED_SOURCE = parser.get('itsm', 'REPORTED_SOURCE')
        self.SERVICE_TYPE = parser.get('itsm', 'SERVICE_TYPE')
        self.STATUS = parser.get('itsm', 'STATUS')
        self.ACTION = parser.get('itsm', 'ACTION')
        self.CREATE_REQUEST = parser.get('itsm', 'CREATE_REQUEST')
        self.URGENCY = parser.get('itsm', 'URGENCY')
        self.EMAIL_TO = parser.get('email', 'EMAIL_TO')
        self.EMAIL_FROM = parser.get('email', 'EMAIL_FROM')
    except:
        logging.error('A problem occurred when parsing the %s config file. Exception: %s %s',
                      CONFIG_FILE, str(sys.exc_info()[0]), str(sys.exc_info()[1]))
        sys.exit(ERR_CONFIG_FILE)
def __init__(self, config=None, opts=None):
    # not enough info to execute
    if config is None and opts is None:
        print "Please specify command option or config file ..."
        return
    # config parser
    parser = SafeConfigParser()
    parser.read(config)
    self.debug = parser.getboolean('knn', 'debug')
    self.seed = parser.getint('knn', 'random_seed')
    self.obj = 'dt'
    self.trainfile = parser.get('knn', 'train')
    self.validfile = parser.get('knn', 'valid')
    self.testfile = parser.get('knn', 'test')
    self.vocabfile = parser.get('knn', 'vocab')
    self.domain = parser.get('knn', 'domain')
    self.percentage = float(parser.getfloat('knn', 'percentage')) / 100.0
    # Setting generation specific parameters
    self.topk = parser.getint('knn', 'topk')
    self.detectpairs = parser.get('knn', 'detectpairs')
    self.verbose = parser.getint('knn', 'verbose')
    # set random seed
    np.random.seed(self.seed)
    random.seed(self.seed)
    np.set_printoptions(precision=4)
    # setting data reader, processors, and lexicon
    self.setupSideOperators()
def load_settings(self):
    self.default_settings()
    if not os.path.isfile(self.config_file_name):
        return
    config = SafeConfigParser()
    config.read(self.config_file_name)
    self.input_directory = config.get('main', 'input_directory').decode('utf-8')
    self.output_directory = config.get('main', 'output_directory').decode('utf-8')
    self.video_width = config.getint('main', 'video_width')
    self.video_height = config.getint('main', 'video_height')
    self.shift_start = config.getfloat('main', 'pad_start')
    self.shift_end = config.getfloat('main', 'pad_end')
    self.time_delta = config.getfloat('main', 'gap_between_phrases')
    self.is_split_long_phrases = config.getboolean('main', 'is_split_long_phrases')
    self.phrases_duration_limit = config.getint('main', 'phrases_duration_limit')
    self.mode = config.get('main', 'mode')
    self.is_write_output_subtitles = config.getboolean('main', 'is_write_output_subtitles')
    self.is_ignore_sdh_subtitle = config.getboolean('main', 'is_ignore_sdh_subtitle')
    self.is_add_dir_to_media_path = config.getboolean('main', 'is_add_dir_to_media_path')
    value = [e.strip() for e in config.get('main', 'recent_deck_names').decode('utf-8').split(',')]
    if len(value) != 0:
        self.recent_deck_names.extendleft(value)
def advance(status):
    """Continue job saved with last checkpoint saved in status file."""
    cfg = SafeConfigParser()
    cfg.read(status)
    try:
        max_iter = cfg.getint("caffe", "last_iter")
    except Exception:
        max_iter = 0
    last_checkpoint, current_iter = get_checkpoint(cfg)
    if max_iter and current_iter >= max_iter:
        return
    current_run = cfg.getint("status", "current_run") + 1
    cfg.set("status", "current_run", str(current_run))
    save_config(cfg)
    update_solver(cfg)
    pbs = generate_pbs(cfg, last_checkpoint)
    submit(pbs)
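The status file apparently carries at least these two sections; a sketch with illustrative values follows, keeping in mind that get_checkpoint, save_config, and the other helpers may read or write further options not shown here.

[caffe]
last_iter = 100000

[status]
current_run = 3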
def get(name, rc_file='~/.odoorpcrc'):
    """Return the session configuration identified by `name`
    from the `rc_file` file.

    >>> import odoorpc
    >>> odoorpc.tools.session.get('foo')
    {'protocol': 'jsonrpc', 'user': '******', 'timeout': 120, 'database': 'db_name', 'passwd': 'admin', 'type': 'ODOO', 'port': 8069, 'host': 'localhost'}

    :raise: `ValueError` (wrong session name)
    """
    conf = ConfigParser()
    conf.read([os.path.expanduser(rc_file)])
    if not conf.has_section(name):
        raise ValueError(
            "'%s' session does not exist in %s" % (name, rc_file))
    return {
        'type': conf.get(name, 'type'),
        'host': conf.get(name, 'host'),
        'protocol': conf.get(name, 'protocol'),
        'port': conf.getint(name, 'port'),
        'timeout': conf.getint(name, 'timeout'),
        'user': conf.get(name, 'user'),
        'passwd': conf.get(name, 'passwd'),
        'database': conf.get(name, 'database'),
    }
def fetch(version=''): # Download update.ver archive urlretrieve('http://update.eset.com/eset_upd/' + version + '/update.ver', '/tmp/' + version + '_update.ver.rar') # Extract update.ver RarFile('/tmp/' + version + '_update.ver.rar').extract('update.ver', path='/tmp/' + version) # Load update.ver config = SafeConfigParser() config.read('/tmp/' + version + '/update.ver') # Remove original host and expire section. config.remove_section('HOSTS') config.remove_section('Expire') # Force use my host config.add_section('HOSTS') config.set('HOSTS', 'Other', '200@http://WE_CLOUD/eset_upd/' + version) # Only fetch en-US and zh-CN for section in config.sections(): if config.has_option(section, 'language'): if config.getint(section, 'language') != 1033 and config.getint(section, 'language') != 2052: config.remove_section(section) # Save update.ver with open(out_dir + 'eset_upd/' + version + '/update.ver', 'w') as ver_file: config.write(ver_file) # Process each file for section in config.sections(): if config.has_option(section, 'file'): filename = config.get(section, 'file') if filename[:1] != '/': filename = '/eset_upd/' + version + '/' + filename dir = filename[1:find(filename, '/', 1)] if dir[rfind(dir, '-') + 1:] != 'sta': continue if config.has_option(section, 'size'): file_size = config.getint(section, 'size') else: file_size = -1 if config.has_option(section, 'build'): file_build = config.getint(section, 'build') cursor.execute("SELECT * FROM eset WHERE file = '" + filename + "' AND build = " + str(file_build)) if not cursor.rowcount: process(filename[1:], file_size) cursor.execute("UPDATE eset SET build = " + str(file_build) + " WHERE file = '" + filename + "'") if not cursor.rowcount: cursor.execute("INSERT INTO eset (file, build) VALUES ('" + filename + "', " + str(file_build) + ")") db.commit() else: process(filename[1:], file_size)
def __init__(self, *args, **kwargs):
    luigi.Task.__init__(self, *args, **kwargs)
    parser = SafeConfigParser()
    parser.read(self.conf)
    root = parser.get("basic", "root")
    self.topic_num = parser.getint('plda+', 'topic_num')
    self.n_components = parser.getint('svd', 'n_components')
    self.index = '%s/data/target/paper.topic.index' % root
    self.ids = '%s/data/target/paper.id' % root
def loadConfig(self):
    parser = SafeConfigParser()
    parser.read('/opt/Catdoor/core/config.ini')
    self.pin_irsensor_in = parser.getint('pin_settings', 'irsensor_in')
    self.pin_buzzer_out = parser.getint('pin_settings', 'buzzer_out')
    self.motor_delay = parser.getfloat('motor_settings', 'delay')
    self.motor_distance = parser.getint('motor_settings', 'distance')
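An illustrative /opt/Catdoor/core/config.ini matching the four reads above; pin numbers and motor values are placeholders.

[pin_settings]
irsensor_in = 17
buzzer_out = 27

[motor_settings]
delay = 0.005
distance = 200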
def __init__(self, *args, **kwargs):
    luigi.Task.__init__(self, *args, **kwargs)
    parser = SafeConfigParser()
    parser.read(self.conf)
    root = parser.get("basic", "root")
    self.keep_n = parser.getint("basic", "dict_keep_n")
    self.no_below = parser.getint("basic", "no_below")
    self.no_above = parser.getfloat("basic", "no_above")
    self.dict = '%s/data/train/paper.sampled.dict' % root
class optionReader(object):
    def __init__(self):
        self.options = Options()
        self.config = SafeConfigParser()
        self.config.read([self.options['config']])

    def getEmail(self):
        sender = self.config.get('Email', 'From')
        recepient = self.config.get('Email', 'To')
        return (sender, recepient)

    def getFTPDirectories(self):
        localLog = self.config.get('FTP', 'localLogDir')
        remoteLog = self.config.get('FTP', 'remoteLogDir')
        return (localLog, remoteLog)

    def getRecipeDirectories(self):
        localDir = self.config.get('FTP', 'localRecipeDir')
        remoteDir = self.config.get('FTP', 'remoteRecipeDir')
        return (localDir, remoteDir)

    def getLoggerHeader(self):
        return flatten(self.config.get('SLC', 'Header').split(','))

    def getFTPTime(self):
        return self.config.getint('FTP', 'FTPTime')

    def getPLCVariables(self):
        return self.config.get('SLC', 'Parameters').split(',')

    def getOEETime(self):
        return self.config.getint('RS-232', 'OEETime')

    def getPLCAlarms(self):
        return self.config.get('SLC', 'Alarms').split(',')

    def getPLCRecipe(self):
        return self.config.get('SLC', 'Recipe').split(',')

    def getAlarmTime(self):
        return self.config.getfloat('RS-232', 'AlarmTime')

    def getFTPparms(self):
        host = self.config.get('FTP', 'host')
        port = self.config.getint('FTP', 'port')
        return (host, port)

    def getRS232parms(self):
        host = self.config.get('RS-232', 'host')
        baud = self.config.getint('RS-232', 'baudrate')
        return (host, baud)

    def getRS422Parms(self):
        host = self.config.get('RS-422', 'host')
        baud = self.config.getint('RS-422', 'baudrate')
        return (host, baud)
def infer_topic(in_fn, model_fn, out_fn, conf): parser = SafeConfigParser() parser.read(conf) root = parser.get("basic", "root") hadoop_stream = parser.get('basic', 'hadoop_stream') topic_num = parser.getint('plda+', 'topic_num') alpha = 50.0 / topic_num task_id = uuid.uuid4() infer_in_path = "%s/%s" % (parser.get('plda+', 'infer_in_path'), task_id) infer_out_path = "%s/%s" % (parser.get('plda+', 'infer_out_path'), task_id) infer_burn_in_iter = parser.getint('plda+', 'infer_burn_in_iter') infer_total_iter = parser.getint('plda+', 'infer_total_iter') infer_reduce_tasks = parser.getint('plda+', 'infer_reduce_tasks') infer_reducer_mb = parser.getint('plda+', 'infer_reducer_mb') mapper = '%s/plda/infer_mapper' % root reducer = '%s/plda/infer_reducer' % root reducer_wrapper = '%s/data/temp/reducer_wrapper.sh' % root hdfs = luigi.contrib.hdfs.hadoopcli_clients.create_hadoopcli_client() hdfs.mkdir(infer_in_path) hdfs.put(in_fn, infer_in_path) with open(reducer_wrapper, 'w') as wrapper_fd: print >> wrapper_fd, "#!/bin/bash" print >> wrapper_fd, "./infer_reducer --alpha %f --beta 0.01 --model_file ./%s --burn_in_iterations %d --total_iterations %d -sparse true" % \ (alpha, os.path.basename(model_fn), infer_burn_in_iter, infer_total_iter) cmd = '''hadoop jar %s \ -D mapred.job.name="mr plda+ infer" \ -D mapred.job.map.memory.mb=32 \ -D mapred.job.reduce.memory.mb=%d \ -D io.compression.codecs=org.apache.hadoop.io.compress.DefaultCodec \ -input %s \ -output %s \ -file %s \ -file %s \ -file %s \ -file %s \ -mapper ./infer_mapper \ -reducer ./reducer_wrapper.sh \ -numReduceTasks %d ''' cmd = cmd % (hadoop_stream, infer_reducer_mb, infer_in_path, infer_out_path, model_fn, mapper, reducer, reducer_wrapper, infer_reduce_tasks) os.system(cmd) os.remove(reducer_wrapper) if check_mr_success(infer_out_path): with open(out_fn, 'w') as out_fd: get_mr_dir(infer_out_path, out_fd) hdfs.remove(infer_in_path) hdfs.remove(infer_out_path) else: hdfs.remove(infer_in_path) hdfs.remove(infer_out_path) raise Exception("failed to infer topic")
def __init__(self, *args, **kwargs):
    luigi.Task.__init__(self, *args, **kwargs)
    parser = SafeConfigParser()
    parser.read(self.conf)
    root = parser.get("basic", "root")
    self.batch = parser.getint("rec", "batch")
    self.threshold = parser.getfloat("rec", "threshold")
    self.thread_num = parser.getint("rec", "cpu_core_num")
    self.topk = parser.getint("rec", "topk")
    self.rec = '%s/data/user/user.rec' % root
def __init__(self, *args, **kwargs):
    luigi.Task.__init__(self, *args, **kwargs)
    parser = SafeConfigParser()
    parser.read(self.conf)
    root = parser.get("basic", "root")
    self.svd_model = '%s/data/train/svd.model/svd.model' % root
    self.sample_fraction = parser.getfloat('svd', 'sample_fraction')
    self.n_components = parser.getint('svd', 'n_components')
    self.sampled_doc = '%s/data/temp/paper.topic.sampled' % root
    self.topic_num = parser.getint('plda+', 'topic_num')
def __init__(self, *args, **kwargs):
    luigi.Task.__init__(self, *args, **kwargs)
    parser = SafeConfigParser()
    parser.read(self.conf)
    root = parser.get("basic", "root")
    self.topic_num = parser.getint('plda+', 'topic_num')
    self.shard_size = parser.getint('index', 'shard_size')
    self.index_prefix = '%s/data/target/index/index' % root
    self.index = '%s/data/target/paper.topic.index' % root
    self.ids = '%s/data/target/paper.id' % root
def __init__(self):
    """parameters from parameters.ini"""
    parser = SafeConfigParser()
    parser.read(configfile)
    self.mog2History = parser.getint('PeopleCounting', 'mog2History')
    self.mog2VarThrsh = parser.getint('PeopleCounting', 'mog2VarThrsh')
    self.mog2Shadow = parser.getboolean('PeopleCounting', 'mog2Shadow')
    self.mog2LearningRate = parser.getfloat('PeopleCounting', 'mog2LearningRate')
    self.kernelSize = parser.getint('PeopleCounting', 'kernelSize')
    self.scale = parser.getfloat('PeopleCounting', 'scale')
    self.areaThreshold = math.pi * parser.getfloat('PeopleCounting', 'areaRadius') ** 2
    self.peopleBlobSize = parser.getint('PeopleCounting', 'peopleBlobSize')
    self.distThreshold = parser.getint('PeopleCounting', 'distThreshold')
    self.countingRegion = map(int, parser.get('PeopleCounting', 'countingRegion').split(','))
    self.upperTrackingRegion = map(int, parser.get('PeopleCounting', 'upperTrackingRegion').split(','))
    self.lowerTrackingRegion = map(int, parser.get('PeopleCounting', 'lowerTrackingRegion').split(','))
    self.inactiveThreshold = parser.getint('PeopleCounting', 'inactiveThreshold')
    # self.singlePersonBlobSize = parser.getint('PeopleCounting', 'singlePersonBlobSize')
    self.Debug = parser.getboolean('PeopleCounting', 'Debug')
    self.Visualize = parser.getboolean('PeopleCounting', 'Visualize') or self.Debug
    self.useRatioCriteria = parser.getboolean('PeopleCounting', 'useRatioCriteria')
    self.RTSPurl = parser.get('PeopleCounting', 'RTSPurl')
    self.RTSPframerate = parser.getint('PeopleCounting', 'RTSPframerate')
    # ASSUMPTION: people entering the door walk downwards (direction = 1) in the video
    self.store_id = parser.getint('store', 'store_id')
    self.camera_id = parser.getint('store', 'camera_id')
    self.ipc_username = parser.get('store', 'ipc_username')
    self.ipc_password = parser.get('store', 'ipc_password')
    self.wl_dev_cam_id = parser.get('store', 'wl_dev_cam_id')
def get(name, rc_file='~/.odoorpcrc'): """Return the session configuration identified by `name` from the `rc_file` file. >>> import odoorpc >>> from pprint import pprint as pp >>> pp(odoorpc.session.get('foo')) # doctest: +SKIP {'database': 'db_name', 'host': 'localhost', 'passwd': 'password', 'port': 8069, 'protocol': 'jsonrpc', 'timeout': 120, 'type': 'ODOO', 'user': '******'} .. doctest:: :hide: >>> import odoorpc >>> session = '%s_session' % DB >>> odoo.save(session) >>> data = odoorpc.session.get(session) >>> data['host'] == HOST True >>> data['protocol'] == PROTOCOL True >>> data['port'] == int(PORT) True >>> data['database'] == DB True >>> data['user'] == USER True >>> data['passwd'] == PWD True >>> data['type'] == 'ODOO' True :raise: `ValueError` (wrong session name) """ conf = ConfigParser() conf.read([os.path.expanduser(rc_file)]) if not conf.has_section(name): raise ValueError( "'%s' session does not exist in %s" % (name, rc_file)) return { 'type': conf.get(name, 'type'), 'host': conf.get(name, 'host'), 'protocol': conf.get(name, 'protocol'), 'port': conf.getint(name, 'port'), 'timeout': conf.getint(name, 'timeout'), 'user': conf.get(name, 'user'), 'passwd': conf.get(name, 'passwd'), 'database': conf.get(name, 'database'), }
def __init__(self, filename):
    cp = SafeConfigParser()
    cp.read(filename)
    self.address = cp.get('accel', 'address') or '0.0.0.0'
    self.port = cp.getint('accel', 'port') or 21212
    self.refresh = cp.getint('accel', 'refresh') or 300
    self.protocol = cp.get('database', 'protocol')
    self.database = cp.get('database', 'database')
    self.hostname = cp.get('database', 'hostname')
    self.username = cp.get('database', 'username')
    self.password = cp.get('database', 'password')
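A config sketch for this loader, with placeholder values. Note that the `or` fall-backs only take effect when the parsed value is falsy (an empty string or 0): a missing option still raises NoOptionError, and an empty value handed to getint raises ValueError rather than falling back.

[accel]
address = 0.0.0.0
port = 21212
refresh = 300

[database]
protocol = postgres
database = accel
hostname = localhost
username = accel
password = secret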
def read_conf_general(self, forcedcustom=''): parser = SafeConfigParser() parser.read(self.dirconf + 'builtin_params.ini') portno = parser.getint('general', 'port') gen_user = parser.get('general', 'general_user') gen_pwd = parser.get('general', 'general_pwd') config_user = parser.get('general', 'config_user') config_pwd = parser.get('general', 'config_pwd') gen_https = parser.getint('general', 'general_https') gen_trd = parser.getint('general', 'trends') gen_timeout = int(parser.get('general', 'default_timeout')) gen_cacheage = int(parser.get('general', 'max_cache_age')) gen_cacheqty = int(parser.get('general', 'max_cache_qty')) gen_log_size = int(parser.get('general', 'max_log_size')) gen_log_backupcount = int(parser.get('general', 'max_log_backupcount')) gen_seed_warptable = int(parser.get('general', 'seed_warptable')) gen_trends_refreshrate = int(parser.get('general', 'trends_refreshrate')) gen_motd = parser.get('general', 'motd') gen_stats_key = parser.get('general', 'stats_key') gen_tslow = int(parser.get('general', 'timeout_slow')) gen_tfast = int(parser.get('general', 'timeout_fast')) gen_sugg = parser.getint('general', 'search_suggestions') gen_search_default = parser.get('general', 'search_default') gen_trends_qty = parser.getint('general', 'trends_qty') smartsearch = parser.getint('general', 'smartsearch') use_warp = parser.getint('general', 'use_warp') cache_active = parser.getint('general', 'cache_active') searchaddontxt = parser.get('general', 'searchaddontxt') daysretention = parser.getint('general', 'daysretention') revproxy = parser.get('general', 'revproxy') self.cgen = {'portno': portno, 'general_usr': gen_user, 'general_pwd': gen_pwd, 'general_trend': gen_trd, 'config_user': config_user, 'config_pwd': config_pwd, 'smartsearch': smartsearch, 'use_warp' : use_warp, 'searchaddontxt': searchaddontxt, 'daysretention': daysretention, 'general_suggestion': gen_sugg, 'general_https': gen_https, 'cache_active': cache_active, 'general_ipaddress': '', 'default_timeout': gen_timeout, 'timeout_class': [gen_tfast, gen_timeout, gen_tslow], 'max_cache_age': gen_cacheage, 'log_backupcount': gen_log_backupcount, 'max_cache_qty': gen_cacheqty, 'log_size': gen_log_size, 'seed_warptable': gen_seed_warptable, 'trends_refreshrate': gen_trends_refreshrate, 'search_default': gen_search_default, 'trends_qty': gen_trends_qty, 'sabnzbd_url': '', 'sabnzbd_api': '', 'nzbget_url': '', 'nzbget_user': '', 'nzbget_pwd': '', 'nzbget_scheme': '', 'nzbget_url_port': '', 'general_apikey': '', 'general_restrictopt1': 0, 'general_dereferer': 0, 'predb_active': 1, 'revproxy': revproxy, 'stats_key': gen_stats_key, 'motd': gen_motd} self.selectable_speedopt = copy.deepcopy(self.selectable_speedopt_cpy) self.selectable_speedopt[0][1] += ' [' + str(self.cgen['timeout_class'][1]) + 's]' self.selectable_speedopt[1][1] += ' [' + str(self.cgen['timeout_class'][2]) + 's]'
def createGenericServerCls(path, filename, conf):
    """Create a ServerProcess class representing a generic server.

    Options for this server are passed in as a string in standard .ini
    format. We use a string rather than a file to allow this configuration
    to be extracted from a larger file if necessary.
    """
    class cls(ServerProcess):
        pass

    scp = SafeConfigParser()
    scp.readfp(StringIO.StringIO(conf))

    # general information
    cls.name = scp.get('info', 'name', raw=True)
    cls.__doc__ = scp.get('info', 'description', raw=True)
    if scp.has_option('info', 'version'):
        cls.version = scp.get('info', 'version', raw=True)
    else:
        cls.version = '0.0'
    try:
        cls.instancename = scp.get('info', 'instancename', raw=True)
    except:
        cls.instancename = cls.name
    cls.environVars = findEnvironmentVars(cls.instancename)
    cls.isLocal = len(cls.environVars) > 0

    # startup
    platform_cmdline_option = 'cmdline_{}'.format(sys.platform)
    if scp.has_option('startup', platform_cmdline_option):
        # use platform-specific command line
        cls.cmdline = scp.get('startup', platform_cmdline_option, raw=True)
    else:
        # use generic command line
        cls.cmdline = scp.get('startup', 'cmdline', raw=True)
    cls.path = path
    cls.filename = filename
    try:
        cls.timeout = float(scp.getint('startup', 'timeout'))
    except:
        pass

    # shutdown
    if scp.has_option('shutdown', 'message'):
        cls.shutdownMode = 'message', int(scp.get('shutdown', 'message', raw=True))
    elif scp.has_option('shutdown', 'setting'):
        cls.shutdownMode = 'setting', scp.get('shutdown', 'setting', raw=True)
    try:
        cls.shutdownTimeout = float(scp.getint('shutdown', 'timeout'))
    except:
        pass

    return cls
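A conf string accepted by createGenericServerCls might look like the sketch below. Only info/name, info/description, and startup/cmdline are strictly required by the code above; everything else is optional, the concrete values are illustrative, and the %LABRADNODE% placeholder is only a guess at the kind of environment variable findEnvironmentVars looks for (raw=True keeps ConfigParser from treating the % as interpolation).

[info]
name = Example Server
description = Does example things.
version = 1.2
instancename = Example Server (%LABRADNODE%)

[startup]
cmdline = python example_server.py
timeout = 20

[shutdown]
message = 987654321
timeout = 5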
class DefaultProperties(): def __init__(self): self.currentDesktop = getCurrentUsersDesktopPath() self.appdata = getCurrentUsersAppDataPath() self.templatebase = os.path.join(self.appdata, "maestro", "data") defaults = { "basedir": self.currentDesktop, "region_leaf_base": "IzR1.09", "starting_udp_port": 9020, "starting_http_port": 9500, "template_basedir": os.path.join(self.templatebase, "templates"), "backend_subnet": "10.0.0.0/20", "max_region_slots": 16, } self.configParser = SafeConfigParser(defaults) @property def backend_ip(self): return self.hostingResource.internalIp @property def frontend_ip(self): return self.hostingResource.externalIp @property def hostName(self): return self.hostingResource.hostName def loadConfiguration(self, filePath=None): if (filePath == None): filePath = os.path.join(self.appdata, product_name(), "maestro.config") if (os.path.exists(filePath)): self.configParser.read(filePath) #after configuration loading, grab our hosting compute resource info self.loadHostInformationFromEnvDB() #set the appropriate values in the config self.configParser.set("maestro", "frontend_ip", self.frontend_ip) self.configParser.set("maestro", "backend_ip", self.backend_ip) self.configParser.set("maestro", "hostname", self.hostName) def loadHostInformationFromEnvDB(self): envDbConfig = self.getEnvironmentDbConfig() resource = None for ip in GetPrivateIPs(): resource = ComputeResource.findByInternalIp(envDbConfig, ip) if resource != None: break if resource == None: raise RuntimeError( "No hosting resource entry found matching any assigned private IP" ) self.hostingResource = resource def getValue(self, name, section="maestro"): return self.configParser.get(section, name) def getInteger(self, name, section="maestro"): return self.configParser.getint(section, name) def getBoolean(self, name, section="maestro"): return self.configParser.getboolean(section, name) def getFloat(self, name, section="maestro"): return self.configParser.getfloat(section, name) def getCoreDbConfig(self): section = "maestro" config = { 'user': self.configParser.get(section, "core_db_userid"), 'password': self.configParser.get(section, "core_db_password"), 'host': self.configParser.get(section, "core_db_host"), 'database': self.configParser.get(section, "core_db_database"), 'raise_on_warnings': False, } return config def getEnvironmentDbConfig(self): section = "maestro" config = { 'user': self.configParser.get(section, "env_db_userid"), 'password': self.configParser.get(section, "env_db_password"), 'host': self.configParser.get(section, "env_db_host"), 'database': self.configParser.get(section, "env_db_database"), 'raise_on_warnings': False, } return config def getGridShareCredentials(self): section = "maestro" username = self.configParser.get(section, "gridshare_user") password = self.configParser.get(section, "gridshare_pass") return (username, password) def getTransferShareCredentials(self): section = "maestro" username = self.configParser.get(section, "transfer_share_user") password = self.configParser.get(section, "transfer_share_pass") return (username, password) def getItems(self, section="maestro", vars={}): replacements = {} for key in vars.keys(): replacements[str(key)] = str(vars[key]) keys = self.configParser.items(section, False, replacements) return keys
def __init__(self): # config stuff config = SafeConfigParser() config.read('indexing/utilities/config.ini') es_host = config.get('elasticsearch', 'HOST') es_port = config.getint('elasticsearch', 'PORT') es_index_name = config.get('elasticsearch', 'PROCESS_INDEX') es_doc_type = config.get('elasticsearch', 'PROCESS_DOC_TYPE') es = Elasticsearch([{'host': es_host, 'port': es_port}]) # es_host = "localhost" # es_port = "9200" # es_index_name = "data_features" # es = Elasticsearch([{'host': es_host, 'port': es_port}]) res = es.search(index=es_index_name, body={"size": 100, "query": {"match_all": {}}}) res = res['hits']['hits'] yellow_cards = [] total_shot_ratio = [] fantasy_points_per_game = [] influence = [] fantasy_cost_change = [] shoot_percentage = [] selected_percentage = [] shots_on_target = [] in_dreamteam = [] minutes_played = [] creativity = [] ict_index = [] fantasy_total_points = [] form = [] bonus = [] assists = [] ea_index = [] fantasy_transfers_out_in = [] dreamteam_count = [] save_percentage = [] pdo = [] threat = [] red_cards = [] goals_scored = [] home = [] away = [] score = [] for item in res: yellow_cards.append(item['_source']['yellow_cards']) total_shot_ratio.append(item['_source']['total_shot_ratio']) fantasy_points_per_game.append(item['_source']['fantasy_points_per_game']) influence.append(item['_source']['influence']) fantasy_cost_change.append(item['_source']['fantasy_cost_change']) shoot_percentage.append(item['_source']['shoot_percentage']) selected_percentage.append(item['_source']['selected_percentage']) shots_on_target.append(item['_source']['shots_on_target']) in_dreamteam.append(item['_source']['in_dreamteam']) minutes_played.append(item['_source']['minutes_played']) creativity.append(item['_source']['creativity']) ict_index.append(item['_source']['ict_index']) fantasy_total_points.append(item['_source']['fantasy_total_points']) form.append(item['_source']['form']) bonus.append(item['_source']['bonus']) assists.append(item['_source']['assists']) ea_index.append(item['_source']['ea_index']) fantasy_transfers_out_in.append(item['_source']['fantasy_transfers_out_in']) dreamteam_count.append(item['_source']['dreamteam_count']) save_percentage.append(item['_source']['save_percentage']) pdo.append(item['_source']['pdo']) threat.append(item['_source']['threat']) red_cards.append(item['_source']['red_cards']) goals_scored.append(item['_source']['goals_scored']) home.append(item['_source']['home']) away.append(item['_source']['away']) score.append(item['_source']['score']) d = {'yellow_cards': yellow_cards, 'total_shot_ratio': total_shot_ratio, 'fantasy_points_per_game': fantasy_points_per_game, 'influence': influence, 'fantasy_cost_change': fantasy_cost_change, 'shoot_percentage': shoot_percentage, 'selected_percentage': selected_percentage, 'shots_on_target': shots_on_target, 'in_dreamteam': in_dreamteam, 'minutes_played': minutes_played, 'ict_index': ict_index, 'fantasy_total_points': fantasy_total_points, 'form': form, 'bonus': bonus, 'assists': assists, 'ea_index': ea_index, 'fantasy_transfers_out_in': fantasy_transfers_out_in, 'dreamteam_count': dreamteam_count, 'save_percentage': save_percentage, 'pdo': pdo, 'threat': threat, 'red_cards': red_cards, 'goals_scored': goals_scored, 'home': home, 'away': away, 'score': score} self.df = pd.DataFrame(d) # self.df.loc[self.df["score"] > 0, "score"] = 1 # self.df.loc[self.df["score"] == 0, "score"] = 0 # self.df.loc[self.df["score"] < 0, "score"] = -1 # print self.df self.predictors = ["pdo", 
"shots_on_target","total_shot_ratio","yellow_cards","fantasy_points_per_game", "influence", "ict_index", "form", "ea_index", "threat", "goals_scored"] self.alg = RandomForestClassifier(random_state=1, n_estimators=50, min_samples_split=7, min_samples_leaf=4) self.process = team_data_processing(es_host, es_port)
def loadProject(self): """Load any project data to initialize (in development) This should implemented and documented somewhere else, but for now, let's start it here. FILE FORMAT name: experiment.cfg sections: [load] material-list = <filename> reader-list = <filename> [options] # options for this experiment """ cfgFile = 'experiment.cfg' exp = wx.GetApp().ws # if os.path.exists(cfgFile): p = Parser() p.read(cfgFile) # # Each section defines a material # sec = 'load' if p.has_section(sec): opt = 'material-list' try: fname = p.get(sec, opt) exp.loadMaterialList(fname) print 'loaded materials list from "%s"\n' % fname except: wx.MessageBox('failed to autoload materials list') pass opt = 'reader-list' try: fname = p.get(sec, opt) exp.loadReaderList(fname) print 'loaded readers list from "%s"\n' % fname except: wx.MessageBox('failed to autoload readers list') pass opt = 'detector' try: fname = p.get(sec, opt) exp.loadDetector(fname) print 'loaded detector from "%s"\n' % fname except: wx.MessageBox('failed to autoload detector list') pass pass sec = 'options' if p.has_section(sec): opt = 'start-tab' try: val = p.getint(sec, opt) self.nBook.SetSelection(val) print 'starting on tab number %d\n' % val except: pass pass return
def main_(): # todo, add signal first signal.signal(signal.SIGINT, handler_stop) signal.signal(signal.SIGTERM, handler_stop) signal.signal(signal.SIGHUP, handler_stop) parser = create_parser() opt = parser.parse_args() conf = SafeConfigParser() conf.readfp(opt.conf) output_service_conf = dict(conf.items('source')) input_service_conf = dict(conf.items('destination')) if conf.has_option('common', 'threads'): _threads = conf.getint('common', 'threads') else: _threads = 10 workspace_ = conf.get('common', 'workspace') try: os.makedirs(workspace_) except OSError: pass log_config['handlers']['error_file']['filename'] = path.join( workspace_, 'failed_files.txt') dictConfig(log_config) loads_services() output_service = services_[output_service_conf['type']]( **output_service_conf) input_service = services_[input_service_conf['type']](**input_service_conf) work_dir = conf.get('common', 'workspace') # init share queue and lock, queue is for running task, lock is for leveldb filter share_queue = multiprocessing.Queue() lock = multiprocessing.Lock() # init restore process restore_process = multiprocessing.Process(target=restore_check_thread, name="restore_check_worker", args=(share_queue, lock, work_dir, output_service, input_service)) restore_process.daemon = True restore_process.start() # init work process pool threads_pool = [] limit = max([_threads, multiprocessing.cpu_count()]) for i in range(limit): p = multiprocessing.Process(target=work_thread, name="running_task_worker", args=(share_queue, lock, work_dir, output_service, input_service)) p.daemon = True threads_pool.append(p) start_pool(threads_pool) while True: global stop if stop: logger.info("main process stop is true, will exit") break # check child process if restore_process.is_alive(): logger.info("main process, sleep 3 seconds") time.sleep(3) else: logger.info( "restore_check_process is not alive, maybe normally exit, " "so main process will normally exit too") time.sleep(6) restore_process.join() signal_pool(threads_pool) wait_pool(threads_pool) logger.info("main process: %d, is exit normally", os.getpid()) sys.exit(0) pass # todo, term signal quit, sleep a few more seconds time.sleep(6) restore_process.join() wait_pool(threads_pool) logger.info("main process: %d, is exit signal", os.getpid()) pass
if (V >= 3):
    from configparser import SafeConfigParser
    parser = SafeConfigParser()
    name = 'kmax1_example.ini'
    parser.read(name)

    k_max = parser.getfloat('floats', 'k_max')
    k_min = parser.getfloat('floats', 'k_min')
    step = parser.getfloat('floats', 'step')
    max = parser.getfloat('floats', 'max')
    P_right = parser.getfloat('floats', 'P_w_right')
    P_left = parser.getfloat('floats', 'P_w_left')
    C_window = parser.getfloat('floats', 'C_window')
    n_pad = parser.getint('integers', 'n_pad')
    down_sample = parser.getint('integers', 'down_sample')
    read_name = parser.get('files', 'in_file')
    name = parser.get('files', 'out_file')

    d = np.loadtxt(read_name)  # load data
    k = d[:, 0]
    P = d[:, 1]

    id = np.where((k >= k_min) & (k <= k_max))[0]
    k = k[id]
    P = P[id]
    k = k[::down_sample]
    P = P[::down_sample]
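An example kmax1_example.ini consistent with the reads above; all numbers and file names are placeholders.

[floats]
k_max = 10.0
k_min = 0.001
step = 0.01
max = 1.0
P_w_right = 0.2
P_w_left = 0.2
C_window = 0.75

[integers]
n_pad = 1000
down_sample = 2

[files]
in_file = P_in.dat
out_file = P_out.dat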
parser.add_argument('--idb', nargs=1, action='store')
args = parser.parse_args()

if not args.idb:
    err_log('[sync] no idb argument')

try:
    for loc in ('IDB_PATH', 'USERPROFILE', 'HOME'):
        if loc in os.environ:
            confpath = os.path.join(os.path.realpath(os.environ[loc]), '.sync')
            if os.path.exists(confpath):
                config = SafeConfigParser({'port': PORT, 'host': HOST})
                config.read(confpath)
                if config.has_section('INTERFACE'):
                    PORT = config.getint('INTERFACE', 'port')
                    HOST = config.get('INTERFACE', 'host')
                break
except Exception as e:
    err_log('failed to load configuration file')

server = BrokerSrv(args.idb[0])

try:
    server.bind()
except socket.error as e:
    server.announcement('failed to bind')
    err_log('server.bind error')

try:
    server.notify()
    required=True,
    help='command to be sent enclosed by single or double quotes',
    dest='command')
args = parser.parse_args()

# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# CONFIGURATION PARAMETERS
parser = SafeConfigParser()
parser.readfp(args.config_file)

# RabbitMQ server IP address
rmq_ip = parser.get('general', 'rmq_ip')
# RabbitMQ server port number
rmq_port = parser.getint('general', 'rmq_port')
# queue name (storage node hostname)
rmq_queue = parser.get('general', 'rmq_queue')
# RabbitMQ authentication
rmq_username = parser.get('general', 'rmq_username')
rmq_password = parser.get('general', 'rmq_password')
# SSL options
rmq_ssl = parser.getboolean('general', 'rmq_ssl')
rmq_ssl_options = parser.get('general', 'rmq_ssl_options')

# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# discard error messages
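The file handed in as args.config_file would need a [general] section along these lines; hostnames, credentials, and the SSL options value are placeholders.

[general]
rmq_ip = 10.0.0.5
rmq_port = 5672
rmq_queue = storage01
rmq_username = guest
rmq_password = guest
rmq_ssl = false
rmq_ssl_options = {}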
    raise ValueError(
        "--exclude is larger than --window. That doesn't make sense")

#=================================================

config = SafeConfigParser()
config.read(opts.config)

conf = config.getfloat('general', 'conf')
ifo = config.get('general', 'ifo')

#===========

kwgdsdir = config.get("kleinewelle", "gdsdir")
kwbasename = config.get("kleinewelle", "basename")
kwstride = config.getint("kleinewelle", "stride")
kwchannels = config.get("kleinewelle", "channels").split()
for chan in kwchannels:
    if not config.has_section(chan):
        raise ValueError("no section for channel=%s found in %s" % (chan, opts.config))

#===========

oogdsdir = config.get('OfflineOmicron', 'gdsdir')
oochannels = config.get('OfflineOmicron', 'channels').split()
for chan in oochannels:
    if not config.has_section(chan):
        raise ValueError('no section for channel=%s in %s' % (chan, opts.config))
def getint(key):
    try:
        # return the parsed integer; the key packs section and option into one string
        return SafeConfigParser.getint(self, *key.split("|", 1))
    except:
        pass
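Whatever object carries this helper, the composite key apparently packs "section|option" into a single string, and the bare except turns missing or malformed values into None. A usage sketch under that assumption, with the cfg object and the [server] section purely hypothetical:

port = cfg.getint('server|port')   # looked up as section 'server', option 'port'
if port is None:                   # swallowed by the except/pass above
    port = 8080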
    user2 = 20
    """)
    sys.exit(1)

if sys.argv[1].strip().upper() == 'PASS':
    import getpass
    pwd = getpass.getpass()
    print sha256(pwd).hexdigest()
    sys.exit(0)

config = SafeConfigParser()
config.read(sys.argv[1])

SERVER_HOST = config.get('server', 'host')
SERVER_PORT = config.has_option('server', 'port') and config.getint('server', 'port') or 119
SERVER_USER = config.get('server', 'login')
SERVER_PASS = config.get('server', 'password')
SERVER_SSL = config.has_option('server', 'use ssl') and config.getboolean('server', 'use ssl') or False
SERVER_CONNECTIONS = config.has_option('server', 'max connections') and config.getint('server', 'max connections') or 5
PROXY_SSL = config.has_option('proxy', 'use ssl') and config.getboolean('proxy', 'use ssl') or False
PROXY_CERT_PEM = config.has_option('proxy', 'cert file') and config.get('proxy', 'cert file', '').strip() or ''
PROXY_CERT_KEY = config.has_option('proxy', 'cert key') and config.get('proxy', 'cert key').strip() or ''
PROXY_CA_VERIFY = config.has_option('proxy',
def main(): """Main.""" if len(sys.argv) != 2: usage() sys.exit(-1) # -------------------------------------------------------------------------- # Parse the parameters. config = SafeConfigParser() config.read(sys.argv[1]) database_file = config.get('input', 'database_file') year_val = config.getint('input', 'year') month_str = config.get('input', 'month') stop_file = config.get('input', 'stopwords') remove_singletons = config.getboolean('input', 'remove_singletons') build_images = {} build_images['rgb'] = config.getboolean('input', 'build_rgb_images') build_images['grey'] = config.getboolean('input', 'build_grey_images') build_csv_files = config.getboolean('input', 'build_csv_files') full_users_only = config.getboolean('input', 'full_users') # XXX: If full_users_only is not set to True, the images and such have # varying dimensions... which is bad. So, there is a bug here and I have # yet to fully investigate it. if month_str not in MONTHS: usage() sys.exit(-2) output_set = {} for section in config.sections(): if section.startswith("run"): output_folder = config.get(section, 'output_folder') output_set[section] = \ Output( output_folder, config.getint(section, 'request_value')) try: stat(output_folder) except OSError: mkdir(output_folder) # -------------------------------------------------------------------------- # Pull stop words stopwords = import_stopwords(stop_file) kickoff = \ """ ------------------------------------------------------------------- parameters : database : %s date : %s output : %s stop : %s count : %s remove : %s output : %s full only : %s ------------------------------------------------------------------- """ print kickoff % \ (database_file, (month_str, year_val), str([output_set[output].get_folder() for output in output_set]), stop_file, str([output_set[output].get_request() for output in output_set]), remove_singletons, build_images, full_users_only) # now that it's an integer lookup that can be more readily searched and # indexed versus a text field search with like. query_prefetch = \ "select owner, created, contents as text from tweets where yyyymm = %d;" # -------------------------------------------------------------------------- # Build a set of documents, per user, per day. num_days = monthrange(year_val, int(MONTHS[month_str]))[1] user_data = \ data_pull( database_file, query_prefetch % \ int(str_yearmonth(year_val, int(MONTHS[month_str])))) if len(user_data) < 2: print "empty dataset." sys.exit(-3) # you want full users only if you're running matrix completion stuff. if full_users_only: users = frame.find_full_users(user_data, stopwords, num_days) else: users = frame.find_valid_users(user_data, stopwords) print "data pulled" print "user count: %d\tframe users: %d" % (len(user_data), len(users)) # this is only an issue at present. mind you, because for the video # analysis code the users don't have to be full. if len(users) < 2: print "no full users" sys.exit(-4) # -------------------------------------------------------------------------- # I don't build a master tf-idf set because the tf-idf values should... # evolve. albeit, I don't think I'm correctly adjusting them -- I'm just # recalculating then. # # Calculate daily tf-idf; then build frame from top terms over the period # of days. frames = {} for day in range(1, num_days + 1): # This is run once per day overall. frames[day] = frame.build_full_frame(users, user_data, day) frames[day].calculate_tfidf(stopwords, remove_singletons) if frames[day].tfidf_len() == 0: print "weird data error." 
sys.exit(-5) # This is run once per day per output. for output in output_set: out = output_set[output] out.add_terms(frames[day].top_terms_overall(out.get_request())) # get_range() is just whatever the last time you ran # top_terms_overall new_range = frames[day].get_range() # This way the images are created with the correct range to cover # all of them. if out.max_range < new_range: out.max_range = new_range #break #if day == 3: #break # just do first day. print "Frames created" # len(overall_terms) should be at most 250 * num_users * num_days -- if # there is no overlap of high value terms over the period of days between # the users. If there is literally no overlap then each user will have # their own 250 terms each day. # -------------------------------------------------------------------------- # Dump the matrix. output_matrix(frames, output_set, build_csv_files, build_images)
"""Parses "parsers" config value""" strRRs = re.split(r",\s*", configLine) self.parserClasses = [self.name2class[rr] for rr in strRRs] if __name__ == '__main__': if len(sys.argv) != 3: print >> sys.stderr, "ERROR: usage: <domain_file> <scraper_config>" sys.exit(1) domainFilename = sys.argv[1] domainFile = file(domainFilename) scraperConfig = SafeConfigParser() scraperConfig.read(sys.argv[2]) threadCount = scraperConfig.getint("processing", "scan_threads") # prefix/schema to use in DB: prefix = "" if scraperConfig.has_option("database", "prefix"): prefix = scraperConfig.get("database", "prefix") sourceEncoding = "utf-8" if scraperConfig.has_option("dns", "source_encoding"): sourceEncoding = scraperConfig.get("dns", "source_encoding") #DNS resolution options taFile = scraperConfig.get("dns", "ta_file") opts = DnsConfigOptions(scraperConfig) if opts.unboundConfig: ub_ctx_config(opts.unboundConfig)
    'PORT': PORT,
    'remoteHosts': remoteHosts,
    'parakeet_url': parakeet_url,
    'parakeet_passcode': parakeet_passcode,
    'use_raspberry_pi_internal_serial_port': False,
    'DEFAULT_LOG_FILE': DEFAULT_LOG_FILE
})

# script should be python-usb-wixel.py and then config file will be python-usb-wixel.cfg
config_path = re.sub(r"\.py$", ".cfg", os.path.realpath(__file__))
if (os.path.isfile(config_path)):
    config.read(config_path)
    print "Loading configuration from: " + config_path
    HOST = config.get('main', 'HOST').strip()
    PORT = config.getint('main', 'PORT')
    remoteHosts = config.get('main', 'remoteHosts').strip()
    parakeet_url = config.get('main', 'parakeet_url').strip()
    parakeet_passcode = config.get('main', 'parakeet_passcode').strip()
    try:
        use_raspberry_pi_internal_serial_port = config.getboolean('main', 'use_raspberry_pi_internal_serial_port')
    except:
        use_raspberry_pi_internal_serial_port = False
    DEFAULT_LOG_FILE = config.get('main', 'DEFAULT_LOG_FILE').strip()
else:
    print "No custom config file: " + config_path

# remoteHosts is now specified as , separated string then converted to old style list
if (len(remoteHosts) > 0):
    remoteHosts = remoteHosts.split(',')
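The optional python-usb-wixel.cfg sitting next to the script would then look roughly like this; every value is illustrative.

[main]
HOST = 0.0.0.0
PORT = 50005
remoteHosts = 192.168.1.20,192.168.1.21
parakeet_url = http://example.com/receiver.cgi
parakeet_passcode = 12345
use_raspberry_pi_internal_serial_port = false
DEFAULT_LOG_FILE = /var/log/python-usb-wixel.log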
class TrafficTweet(): def __init__(self): # Kludge to silent warnings in python-twitter, not needed for Python versions > 2.7.9 # See https://urllib3.readthedocs.org/en/latest/security.html#insecureplatformwarning.InsecurePlatformWarning urllib3.disable_warnings() self.up = 0 self.down = 0 self.parser = SafeConfigParser() self.parser.read('traffic_collector.conf') self.consumer_key = self.parser.get('twitter', 'consumer_key') self.consumer_secret = self.parser.get('twitter', 'consumer_secret') self.access_key = self.parser.get('twitter', 'access_key') self.access_secret = self.parser.get('twitter', 'access_secret') self.api = twitter.Api(consumer_key=self.consumer_key, consumer_secret=self.consumer_secret, access_token_key=self.access_key, access_token_secret=self.access_secret) @staticmethod def _scale(val): if val >= 1024**2: return val / 1024**2, "(%.1f GB)" elif val >= 1024: return val / 1024, "(%.1f MB)" else: val = 0 if val < 0 else val # Avoid negative values return val, "(%.0f kB)" def update(self, up, down): self.parser.read('traffic_collector.conf') tweet_limit = self.parser.getint('twitter', 'limit') # Limit in MB # up and down in kB up = float(up) down = float(down) do_tweet = True if ( (up + down) - (self.up + self.down)) > (tweet_limit * 1024) else False diff_up = up - self.up diff_down = down - self.down diff_sum = diff_up + diff_down diff_up, diff_up_str = self._scale(diff_up) diff_down, diff_down_str = self._scale(diff_down) diff_sum, diff_sum_str = self._scale(diff_sum) # Avoid tweet if first time (self.up/down == 0) # Avoid tweet to be duplicate of previous => error by including date string if (self.up != 0 and self.down != 0) and do_tweet: status = self.api.PostUpdate( ("%s @ecsmame #B593 Traffic: uplink " + diff_up_str + " downlink " + diff_down_str + " total " + diff_sum_str) % (datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"), diff_up, diff_down, diff_sum)) else: status = None self.up = up self.down = down return status def daily_update(self, t, up, down): # Provide a daily summary of volume # Avoid tweet to be duplicate of previous => error by including date string status = None dt = datetime.datetime.fromtimestamp(int( t / 1000)).timetuple() # To time_struct if dt.tm_hour == 1: # any time during the first hour of the day up = float(up) down = float(down) start_time = t - (24 * 60 * 60 * 1000) # 24 hours before t client = MongoClient() db = client.Router coll = db.B593 post = coll.find_one({"Time": {"$gte": start_time}}) diff_up = up - post['UplinkVolume'] diff_down = down - post['DownlinkVolume'] diff_sum = diff_up + diff_down diff_up, diff_up_str = self._scale(diff_up) diff_down, diff_down_str = self._scale(diff_down) diff_sum, diff_sum_str = self._scale(diff_sum) # Calculate start_time as first time of month start_time = (datetime.datetime(dt.tm_year, dt.tm_mon, day=1) - datetime.datetime(1970, 1, 1)).total_seconds() * 1000 post = coll.find_one({"Time": {"$gte": start_time}}) if post is None: # First day of month, current value not synced to DB if not quiet: app_log.warning('Null post at monthly tweet') diff_month_up = 0 diff_month_down = 0 else: diff_month_up = up - post['UplinkVolume'] diff_month_down = down - post['DownlinkVolume'] diff_month_sum = diff_month_up + diff_month_down diff_month_left = (40 * 1024**2) - diff_month_sum diff_month_up, diff_month_up_str = self._scale(diff_month_up) diff_month_down, diff_month_down_str = self._scale(diff_month_down) diff_month_sum, diff_month_sum_str = self._scale(diff_month_sum) diff_month_left, 
diff_month_left_str = self._scale(diff_month_left) status = self.api.PostUpdate( ("Daily %s @ecsmame #B593 up " + diff_up_str + " down " + diff_down_str + " total " + diff_sum_str) % (datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"), diff_up, diff_down, diff_sum)) status = self.api.PostUpdate( ("Monthly %s @ecsmame #B593 up " + diff_month_up_str + " down " + diff_month_down_str + " total " + diff_month_sum_str + " left " + diff_month_left_str) % (datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"), diff_month_up, diff_month_down, diff_month_sum, diff_month_left)) else: status = None return status
def __init__(self, options): self.vdr_running = False self.options = options self.updateJob = None parser = SafeConfigParser() parser.optionxform = unicode with codecs.open(self.options.config, 'r', encoding='utf-8') as f: parser.readfp(f) configdir = os.path.dirname(self.options.config) parser.read([ os.path.join(configdir, 'staticmount.cfg'), os.path.join(configdir, 'localdirs.cfg') ]) if parser.has_option('targetdirs', 'media'): self.mediadir = parser.get('targetdirs', 'media') else: self.mediadir = "/tmp" if parser.has_option('targetdirs', 'vdr'): self.vdrdir = parser.get('targetdirs', 'vdr') else: self.vdrdir = "/tmp" if parser.has_option('options', 'autofsdir'): self.autofsdir = parser.get('options', 'autofsdir') else: self.autofsdir = "/net" if parser.has_option('options', 'use_i18n'): self.use_i18n = parser.getboolean('options', 'use_i18n') else: self.use_i18n = False if parser.has_option('options', 'nfs_suffix'): self.nfs_suffix = parser.get('options', 'nfs_suffix') else: self.nfs_suffix = "" if parser.has_option('options', 'static_suffix'): self.static_suffix = parser.get('options', 'static_suffix') else: self.static_suffix = "" if parser.has_option('options', 'dbus2vdr'): self.dbus2vdr = parser.getboolean('options', 'dbus2vdr') else: self.dbus2vdr = False if parser.has_option('options', 'extradirs'): self.extradirs = parser.getboolean('options', 'extradirs') else: self.extradirs = False if parser.has_option('options', 'svdrp_port'): self.svdrp_port = parser.getint('options', 'svdrp_port') else: self.svdrp_port = 6419 if parser.has_option('options', 'ip_whitelist'): ip_whitelist = parser.get('options', 'ip_whitelist').split() self.ip_whitelist = [] for ip in ip_whitelist: try: self.ip_whitelist.append(ipaddr.IPNetwork(ip)) except error as e: logging.error("malformed ip range/address: {0}".format(ip)) logging.error(e) else: self.ip_whitelist = [ ipaddr.IPNetwork(u'0.0.0.0/0'), ipaddr.IPNetwork(u'0::0/0') ] if parser.has_option('options', 'ip_blacklist'): ip_blacklist = parser.get('options', 'ip_blacklist').split() self.ip_blacklist = [] for ip in ip_blacklist: try: self.ip_blacklist.append(ipaddr.IPNetwork(ip)) except error as e: logging.error("malformed ip range/address: {0}".format(ip)) logging.error(e) else: self.ip_blacklist = [] self.localdirs = {} self.mediastaticmounts = {} if parser.has_section('localdirs'): for subtype, directory in parser.items('localdirs'): self.localdirs[subtype] = directory if parser.has_section('media_static_mount'): for subtype, directory in parser.items('media_static_mount'): self.mediastaticmounts[subtype] = directory self.vdrstaticmounts = {} if parser.has_section("vdr_static_mount"): for subtype, directory in parser.items('vdr_static_mount'): self.vdrstaticmounts[subtype] = directory if parser.has_option('Logging', 'use_file'): self.log2file = parser.getboolean('Logging', 'use_file') else: self.log2file = False if parser.has_option('Logging', 'logfile'): self.logfile = parser.get('Logging', 'logfile') else: self.logfile = "/tmp/avahi-mounter.log" if parser.has_option('Logging', 'loglevel'): self.loglevel = parser.get('Logging', 'loglevel') else: self.loglevel = "DEBUG" self.hostname = socket.gethostname() if self.log2file: logging.basicConfig( filename=self.logfile, level=getattr(logging, self.loglevel), format='%(asctime)-15s %(levelname)-6s %(message)s', #StreamHandler() ) else: logging.basicConfig( level=getattr(logging, self.loglevel), format='%(asctime)-15s %(levelname)-6s %(message)s', #StreamHandler() ) logging.info(u"Started 
avahi-linker") logging.debug(""" Config: media directory: {mediadir} VDR recordings: {vdrdir} autofs directory: {autofsdir} Local directories: {localdirs} VDR Static remote directories: {vdrstaticmounts} Media Static remote directories: {mediastaticmounts} use translations: {use_il8n} Suffix for NFS mounts: {nfs_suffix} use dbus2vdr: {dbus2vdr} use VDR extra dirs: {extradirs} SVDRP-Port: {svdrp_port} IP whitelist: {ip_whitelist} IP blacklist: {ip_blacklist} Hostname: {hostname} Log to file: {log2file} Logfile: {logfile} Loglevel: {loglevel} """.format(mediadir=self.mediadir, vdrdir=self.vdrdir, autofsdir=self.autofsdir, use_il8n=self.use_i18n, nfs_suffix=self.nfs_suffix, dbus2vdr=self.dbus2vdr, extradirs=self.extradirs, svdrp_port=self.svdrp_port, ip_whitelist=self.ip_whitelist, ip_blacklist=self.ip_blacklist, hostname=self.hostname, loglevel=self.loglevel, logfile=self.logfile, log2file=self.log2file, vdrstaticmounts=self.vdrstaticmounts, mediastaticmounts=self.mediastaticmounts, localdirs=self.localdirs))
def process_do_terms(species_ini_file): """ Function to read in config INI file and run the other functions to process DO terms. """ species_file = SafeConfigParser() species_file.read(species_ini_file) if not species_file.has_section('DO'): logger.error('Species INI file has no DO section, which is needed' ' to run the process_do_terms function.') sys.exit(1) sd_folder = species_file.get('species_info', 'SPECIES_DOWNLOAD_FOLDER') organism = species_file.get('species_info', 'SCIENTIFIC_NAME') do_obo_url = urlsplit(species_file.get('DO', 'DO_OBO_URL')) mim2gene_url = urlsplit(species_file.get('DO', 'MIM2GENE_URL')) genemap_url = urlsplit(species_file.get('DO', 'GENEMAP_URL')) xrdb = species_file.get('DO', 'XRDB') do_obo_filename = os.path.basename(do_obo_url.path) mim2gene_filename = os.path.basename(mim2gene_url.path) genemap_filename = os.path.basename(genemap_url.path) do_obo_file = os.path.join(sd_folder, 'DO', do_obo_filename) mim2gene_file = os.path.join(sd_folder, 'DO', mim2gene_filename) genemap_file = os.path.join(sd_folder, 'DO', genemap_filename) disease_ontology = go() loaded_obo_bool = disease_ontology.load_obo(do_obo_file) if loaded_obo_bool is False: logger.error('DO OBO file could not be loaded.') doid_omim_dict = build_doid_omim_dict(do_obo_file) mim2entrez_dict = build_mim2entrez_dict(mim2gene_file) mim_diseases = build_mim_diseases_dict(genemap_file, mim2entrez_dict) add_do_term_annotations(doid_omim_dict, disease_ontology, mim_diseases) disease_ontology.populated = True disease_ontology.propagate() tags_dictionary = None if species_file.has_option('DO', 'TAG_MAPPING_FILE'): tag_mapping_file = species_file.get('DO', 'TAG_MAPPING_FILE') do_id_column = species_file.getint('DO', 'DO_ID_COLUMN') do_name_column = species_file.getint('DO', 'DO_NAME_COLUMN') tag_column = species_file.getint('DO', 'TAG_COLUMN') header = species_file.getboolean('DO', 'TAG_FILE_HEADER') tags_dictionary = build_tags_dictionary( tag_mapping_file, do_id_column, do_name_column, tag_column, header) do_terms = [] for term_id, term in disease_ontology.go_terms.iteritems(): do_term = {} do_term['title'] = create_do_term_title(term) do_term['abstract'] = create_do_term_abstract(term, doid_omim_dict) do_term['xrdb'] = xrdb do_term['organism'] = organism do_term['slug'] = slugify(term_id + '-' + organism) do_term['annotations'] = {} for annotation in term.annotations: if annotation.gid not in do_term['annotations']: do_term['annotations'][annotation.gid] = [] else: do_term['annotations'][annotation.gid].append(annotation.ref) if do_term['annotations']: if tags_dictionary and term_id in tags_dictionary: do_term['tags'] = tags_dictionary[term_id]['gs_tags'] do_terms.append(do_term) return do_terms
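# The download file names above are derived by urlsplit()-ing each configured
# URL and keeping only the last path component. A minimal sketch of that step
# on its own (the URL below is just a placeholder):
import os
from urlparse import urlsplit

do_obo_url = urlsplit('http://example.org/obo/doid.obo')
do_obo_filename = os.path.basename(do_obo_url.path)
print(do_obo_filename)  # -> 'doid.obo'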
from ConfigParser import SafeConfigParser import os import time import datetime from nanpy import (ArduinoApi, SerialManager, DallasTemperature) from alarms import sound, instapush settings = SafeConfigParser() settings.read( os.path.join(os.path.abspath(os.path.dirname(__file__)), 'config.cfg')) # Load settings device = settings.get('Arduino', 'SERIAL_PORT') pin_sound = settings.getint('Arduino', 'PIN_SOUND') pin_temp = settings.getint('Arduino', 'PIN_TEMPERATURE') app_id = settings.get('Instapush', 'INSTAPUSH_APP_ID') app_secret = settings.get('Instapush', 'INSTAPUSH_APP_SECRET') event_id = settings.get('Instapush', 'INSTAPUSH_EVENT_NAME') threshold = settings.getfloat('Fridge', 'THRESHOLD') notify_every_x_seconds = settings.getfloat('Fridge', 'NOTIFY_EVERY_X_SECONDS') write_log_every_x_measurements = 50 # Startup arduino connection connection = SerialManager(device=device) connection.open() arduino = ArduinoApi(connection=connection) temperature_sensors = DallasTemperature(connection=connection, pin=pin_temp) temperature_sensors.setResolution(12) # Mute sound by default arduino.pinMode(pin_sound, arduino.OUTPUT)
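# The get()/getint()/getfloat() calls above imply a config.cfg with [Arduino],
# [Instapush] and [Fridge] sections. A sketch that writes such a file with
# SafeConfigParser; every concrete value is a made-up placeholder:
from ConfigParser import SafeConfigParser

cfg = SafeConfigParser()
cfg.add_section('Arduino')
cfg.set('Arduino', 'SERIAL_PORT', '/dev/ttyUSB0')
cfg.set('Arduino', 'PIN_SOUND', '8')
cfg.set('Arduino', 'PIN_TEMPERATURE', '2')
cfg.add_section('Instapush')
cfg.set('Instapush', 'INSTAPUSH_APP_ID', 'your-app-id')
cfg.set('Instapush', 'INSTAPUSH_APP_SECRET', 'your-app-secret')
cfg.set('Instapush', 'INSTAPUSH_EVENT_NAME', 'fridge_alert')
cfg.add_section('Fridge')
cfg.set('Fridge', 'THRESHOLD', '8.0')
cfg.set('Fridge', 'NOTIFY_EVERY_X_SECONDS', '600')
with open('config.cfg', 'wb') as f:
    cfg.write(f)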
def main(): # Init Parser parser = SafeConfigParser() # Read configuration file parser.read("packages/system/config.ini") # Save the start automation time # This should always be the first task of the script, otherwise no log can be kept setStartTime(parser) # Set working times (only on the first run of the script) setWorkingSchedule(parser) # Create private and break levels (only on the first run of the script) setRandomPrivateAndBreak(parser) # Load actions browsing = parser.getint("actions", "browsing") mailing = parser.getint("actions", "mailing") printing = parser.getint("actions", "printing") copyfiles = parser.getint("actions", "copyfiles") copysea = parser.getint("actions", "copysea") ssh = parser.getint("actions", "ssh") meeting = parser.getint("actions", "meeting") offline = parser.getint("actions", "offline") private = parser.getint("actions", "private") breaks = parser.getint("actions", "breaks") attacking = parser.getint("actions", "attacking") t = parser.getint("time", "counter") # Determine subnet and host part of the IP subnet, host, hostname = getAndSetSubnetHostAndHostname(parser) # Determine ID of the instance (MAC address as integer) global myID if platform.system() == "Linux": myID = str(getnode()) else: # On Windows a workaround is needed, since getnode() returns an incorrect value hexMac = check_output(["getmac"])[162:180] hexMacNoDash = hexMac.replace("-", "") intMac = int(hexMacNoDash, 16) myID = str(intMac) # Fetch recent server config getCurrentServerConfig() # Read server config parser.read("packages/system/serverconfig.ini") # Configure the servers using the IPs in the ServerConfig file configServers(parser, subnet, host) # Set up a mount with the OpenStack server (to save the logs) configMountWithOpenStackServer() # Save the config.ini to the network drive saveConfigToServer() #echoC(__name__, "Call mainscript with %d, %d, %d, %d, %d, %d" %(actLvl, busLvl, act1, act2, act3, act4, t)) mainscript.init(browsing, mailing, printing, copyfiles, copysea, ssh, meeting, offline, private, breaks, attacking, t)
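# Instead of one getint() call per action, the whole [actions] section can be
# pulled into a dict in one pass. Purely an illustrative sketch reusing the
# file and section names from the snippet above:
from ConfigParser import SafeConfigParser

parser = SafeConfigParser()
parser.read("packages/system/config.ini")
if parser.has_section("actions"):
    actions = dict((name, parser.getint("actions", name))
                   for name in parser.options("actions"))
    # e.g. actions["browsing"], actions["attacking"], ...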
# load config file containing_dir = os.path.abspath(os.path.dirname(sys.argv[0])) cfg_file = SafeConfigParser() path_to_cfg = os.path.join(containing_dir, 'config.cfg') cfg_file.read(path_to_cfg) username = cfg_file.get('reddit', 'username') password = cfg_file.get('reddit', 'password') app_key = cfg_file.get('reddit', 'app_key') app_secret = cfg_file.get('reddit', 'app_secret') subreddit = cfg_file.get('reddit', 'subreddit') flair_db = cfg_file.get('trade', 'flair_db') posttitle_regex = cfg_file.get('post_check', 'posttitle_regex') timestamp_regex = cfg_file.get('post_check', 'timestamp_regex') rules = cfg_file.get('post_check', 'rules') upper_hour_buy = cfg_file.getint('post_check', 'upper_hour_buy') upper_hour_sell = cfg_file.getint('post_check', 'upper_hour_sell') flairs = ast.literal_eval(cfg_file.get('post_check', 'flairs')) # configure logging logger = LoggerManager().getLogger(__name__) # global vars lastid = "" # check to see if last posts conflict with current post (rule 2) def has_been_posted(id, lastpost, post, row, post_type): if row is not None: if not row[id]: lastid = ""
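# The flairs option above stores a Python literal inside the INI file and is
# recovered with ast.literal_eval(). A self-contained sketch of that pattern;
# the option value here is a made-up example:
import ast
from ConfigParser import SafeConfigParser
from StringIO import StringIO

cfg = SafeConfigParser()
cfg.readfp(StringIO("[post_check]\nflairs = ['buyer', 'seller', 'trusted']\n"))
flairs = ast.literal_eval(cfg.get('post_check', 'flairs'))
print(flairs[0])  # -> 'buyer'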
class Settings: def __init__(self, settings_type=None): """ Initialize a Settings object as public or private. """ if settings_type == 'private': c_path = os.path.join(api.application.root_path, "..", "settings_private.cfg") else: c_path = os.path.join(api.application.root_path, "..", "settings.cfg") # fail if the dir with settings.py does not have a settings.cfg if not os.path.isfile(c_path): raise OSError("%s: Settings file '%s' does not exist!" % (sys.argv[0], c_path)) self.config = SafeConfigParser() self.config.file_path = c_path self.config.readfp(open(self.config.file_path)) self.config.settings_type = settings_type self.load_api_keys() def load_api_keys(self): """ Looks for an API keys file and tries to read it. If it doesn't find one, it sets self.api_keys to be an empty dict. """ self.api_keys = {} try: fh = file(self.get("api", "api_keys_file"), "rb") except: return False lines = fh.readlines() for line in lines: line = line.strip() key, ident = line.split("|~|") self.api_keys[key] = ident def get(self, section, key): """ Gets a value. Tries to do some duck-typing. """ raw_value = self.config.get(section, key) if raw_value in ["True", "False"]: return self.config.getboolean(section, key) elif key in ["log_level"]: return getattr(logging, raw_value) else: try: return self.config.getint(section, key) except: pass return raw_value def jsonify(self): """ Renders the config object as JSON. """ d = {} for section in self.config.sections(): d[section] = {} for option in self.config.options(section): d[section][option] = self.get( section, option) # use the custom get() method self.config.json = json.dumps(d) def json_file(self): """ Returns a cStringIO object that looks like a file object. """ self.jsonify() s_file = cStringIO.StringIO() s_file.write(self.config.json) s_file.seek(0) return s_file
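# A minimal standalone version of the same duck-typed lookup idea, without the
# surrounding class. Function and file names here are illustrative only:
import logging
from ConfigParser import SafeConfigParser

def typed_get(config, section, key):
    """Return the option as bool, int, logging level or raw string."""
    raw = config.get(section, key)
    if raw in ("True", "False"):
        return config.getboolean(section, key)
    if key == "log_level":
        return getattr(logging, raw)  # e.g. "DEBUG" -> logging.DEBUG
    try:
        return config.getint(section, key)
    except ValueError:
        return raw

config = SafeConfigParser()
config.read("settings.cfg")  # hypothetical settings file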
installPath = dirname(realpath(__file__)) configFilePath = join(installPath, configFile) if not isfile(configFilePath): print 'CRITICAL: The ' + configFile + ' file is missing. It should be in ' + installPath + '/' exit(1) cfg = SafeConfigParser() cfg.read(configFilePath) # Read config settings into global variables section = 'Logging' completeLog = cfg.get(section, 'completeLog') errorLog = cfg.get(section, 'errorLog') logToConsole = cfg.getboolean(section, 'logToConsole') cfgLogLevel = cfg.get(section, 'cfgLogLevel') log_max_size = cfg.getint(section, 'log_max_size') log_max_backup = cfg.getint(section, 'log_max_backup') section = 'Settings' name = cfg.get(section, 'name') port = cfg.getint(section, 'port') home = "http://" + cfg.get(section, 'home') section = 'DNSMasq' dnsmasq = cfg.get(section, 'dnsmasq') ns_file = join(dnsmasq, cfg.get(section, 'ns_file')) a_file = join(dnsmasq, cfg.get(section, 'a_file')) ptr_file = join(dnsmasq, cfg.get(section, 'ptr_file')) filters = {} key = DiffieHellman()
# SafeConfigParser does not make any attempt to understand the option type. # The application is expected to use the correct method to fetch the value # as the desired type. # get() always returns a string. # Use getint() for integers, getfloat() for floating point numbers, and getboolean() for boolean types. from __future__ import print_function from ConfigParser import SafeConfigParser parser = SafeConfigParser() parser.read('types.ini') print('Integers:') for name in parser.options('ints'): string_value = parser.get('ints', name) value = parser.getint('ints', name) print(' %-12s : %-7r --> %d' %(name, string_value, value)) print('\nFloats:') for name in parser.options('floats'): string_value = parser.get('floats', name) value = parser.getfloat('floats', name) print(' %-12s : %-7r --> %0.2f' %(name, string_value, value)) print('\nBooleans:') for name in parser.options('booleans'): string_value = parser.get('booleans', name) value = parser.getboolean('booleans', name) print(' %-12s : %-7r --> %s' %(name, string_value, value))
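# The example above expects a types.ini file with [ints], [floats] and
# [booleans] sections. A sketch that writes such a file; the individual option
# names and values are made up for illustration:
with open('types.ini', 'w') as f:
    f.write('[ints]\n'
            'positive = 1\n'
            'negative = -5\n'
            '\n'
            '[floats]\n'
            'positive = 0.2\n'
            'negative = -3.14\n'
            '\n'
            '[booleans]\n'
            'number_true = 1\n'
            'yn_false = no\n'
            'string_true = true\n')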
rfh = RotatingFileHandler(log_file_path, maxBytes=1024 * 1024, backupCount=3) rfh.setLevel(loglevel) rfh.setFormatter(formatter) logger.addHandler(rfh) if __name__ == '__main__': setup_logging(cfg) cloud_enabled = cfg.getboolean("cloud", "enabled") cloud_server = cfg.get("cloud", "server") cloud_port = cfg.get("cloud", "port") cloud_url = cfg.get("cloud", "url") cloud_time_out = cfg.getint("cloud", "time_out") cloud = Cloud(cloud_enabled, cloud_server, cloud_port, cloud_url, timeout=cloud_time_out) enabled = cfg.getboolean("dweet", "enabled") dweet_server = cfg.get("dweet", "server") dweet_name = cfg.get("dweet", "name") dweet_time_out = cfg.getint("dweet", "time_out") dweet = Dweet(enabled, dweet_server, dweet_name, timeout=dweet_time_out) broker_host = cfg.get("mqtt_broker", "host") broker_port = cfg.get("mqtt_broker", "port")
import os, errno from ConfigParser import SafeConfigParser # Parse config file parser = SafeConfigParser() parser.read('config_EPIC_climatology.txt') TAG = parser.get('PROJECT', 'TAG') # Tag of SEIMF folder start_yr = parser.getint('PARAMETERS', 'START_YR') until_yr = parser.getint('PARAMETERS', 'UNTIL_YR') base_dir = parser.get('PATHS', 'base_dir') + os.sep epic_dir = base_dir + os.sep + 'EPIC' + os.sep + parser.get('PROJECT', 'project_name') + os.sep + \ parser.get('PROJECT', 'EPIC_dat') + parser.get('PROJECT', 'OUT_TAG') + os.sep wth_dir = epic_dir + os.sep + 'daily' out_dir = epic_dir + os.sep + 'climatology_' + str(start_yr) + '_' + str( until_yr) ############################################################################### # # # ############################################################################### def make_dir_if_missing(d): try: os.makedirs(d) except OSError as exception: if exception.errno != errno.EEXIST: raise
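# The directory strings above are built by concatenating os.sep by hand;
# os.path.join() does the same job and avoids doubled separators. An equivalent
# construction, sketched with the variables already read from the config above:
epic_dir = os.path.join(base_dir, 'EPIC',
                        parser.get('PROJECT', 'project_name'),
                        parser.get('PROJECT', 'EPIC_dat') + parser.get('PROJECT', 'OUT_TAG'))
wth_dir = os.path.join(epic_dir, 'daily')
out_dir = os.path.join(epic_dir, 'climatology_%d_%d' % (start_yr, until_yr))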
vibrating = current_time - last_vibration_time < 2 threading.Timer(1, heartbeat).start() if len(sys.argv) == 1: print "No config file specified" sys.exit() vibrating = False appliance_active = False last_vibration_time = time.time() start_vibration_time = last_vibration_time config = SafeConfigParser() config.read(sys.argv[1]) sensor_pin = config.getint('main', 'SENSOR_PIN') begin_seconds = config.getint('main', 'SECONDS_TO_START') end_seconds = config.getint('main', 'SECONDS_TO_END') pushbullet_api_key = config.get('pushbullet', 'API_KEY') pushbullet_api_key2 = config.get('pushbullet', 'API_KEY2') start_message = config.get('main', 'START_MESSAGE') end_message = config.get('main', 'END_MESSAGE') twitter_api_key = config.get('twitter', 'api_key') twitter_api_secret = config.get('twitter', 'api_secret') twitter_access_token = config.get('twitter', 'access_token') twitter_access_token_secret = config.get('twitter', 'access_token_secret') send_alert(config.get('main', 'BOOT_MESSAGE')) GPIO.setwarnings(False) GPIO.setmode(GPIO.BCM) GPIO.setup(sensor_pin, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)
if not init_log(log_level, log_file): print("init_log() failed: log_level[%s], log_file[%s]" % (log_level, log_file)) exit(1) pexpect_log_file = "startlabviewservice_pexpect.log" try: pexpect_log_file = config.get("common", "pexpect_log_file") except: warn_log("common.pexpect_log_file not found in config file[%s], use[%s]", config_file, pexpect_log_file) try: server = config.get("start", "server") script = config.get("start", "script") wait_time = config.getint("start", "wait_time") download_time = config.getint("start", "download_time") remote_host = config.get("remote", "host") remote_user = config.get("remote", "user") remote_password = config.get("remote", "password") except: error_log("Get config from file[%s] failed: exception type[%s], value[%s]", config_file, sys.exc_info()[0], sys.exc_info()[1]) print("Start imgxfer failed: get config from file[%s] failed!" % config_file) exit(1) try:
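# The pexpect_log_file lookup above wraps config.get() in try/except to fall
# back to a default and log a warning. A has_option()-based helper expresses
# the same idea; sketch only, with plain logging standing in for the snippet's
# own warn_log helper, and a hypothetical config file name:
import logging
from ConfigParser import SafeConfigParser

def get_optional(config, section, option, default):
    """Return the option value, or the default (with a warning) if it is missing."""
    if config.has_section(section) and config.has_option(section, option):
        return config.get(section, option)
    logging.warning("%s.%s not found in config, using default [%s]",
                    section, option, default)
    return default

config = SafeConfigParser()
config.read("imgxfer.cfg")  # hypothetical file name
pexpect_log_file = get_optional(config, "common", "pexpect_log_file",
                                "startlabviewservice_pexpect.log")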
if platform.system().lower() == "windows" : import sys db_config_file = os.path.join(os.path.dirname(sys.executable), 'androbugs-db.cfg') print("[Notice] The output format is not good in Windows console") else : db_config_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'androbugs-db.cfg') if not os.path.isfile(db_config_file) : print("[ERROR] AndroBugs Framework DB config file not found: " + db_config_file) traceback.print_exc() configParser = SafeConfigParser() configParser.read(db_config_file) MongoDB_Hostname = configParser.get('DB_Config', 'MongoDB_Hostname') MongoDB_Port = configParser.getint('DB_Config', 'MongoDB_Port') MongoDB_Database = configParser.get('DB_Config', 'MongoDB_Database') Collection_Analyze_Result = configParser.get('DB_Collections', 'Collection_Analyze_Result') client = MongoClient(MongoDB_Hostname, MongoDB_Port) db = client[MongoDB_Database] # Name is case-sensitive collection_AppInfo = db[Collection_Analyze_Result] # Name is case-sensitive # collection_AnalyzeResults = db['AnalyzeSuccessResults'] query_condition = dict() if args.analyze_mode : query_condition["analyze_mode"] = args.analyze_mode
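# The reads above imply an androbugs-db.cfg with [DB_Config] and
# [DB_Collections] sections. A sketch of a matching file written with
# SafeConfigParser; the hostname, port, database and collection values are
# placeholders, not the project's real defaults:
from ConfigParser import SafeConfigParser

cfg = SafeConfigParser()
cfg.add_section('DB_Config')
cfg.set('DB_Config', 'MongoDB_Hostname', 'localhost')
cfg.set('DB_Config', 'MongoDB_Port', '27017')
cfg.set('DB_Config', 'MongoDB_Database', 'AndroBugsDB')
cfg.add_section('DB_Collections')
cfg.set('DB_Collections', 'Collection_Analyze_Result', 'AnalyzeSuccessResults')
with open('androbugs-db.cfg', 'wb') as f:
    cfg.write(f)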
phones_list = config.get('model', 'model_dir') + '/files/tiedlist' #words_mlf = config.get('model', 'model_dir') + '/files/words.mlf' words_mlf = config.get('recognition', 'reference_mlf').replace( '|MODEL|', config.get('model', 'model_dir')) ref_del_char = config.get('recognition', 'ref_del_char') dict = config.get('model', 'model_dir') + '/dictionary/dict' dict_hdecode = config.get('model', 'model_dir') + '/dictionary/dict.hdecode' config_hdecode = config.get('model', 'config') orig_config = config.get('model', 'model_dir') + '/config/config' lm = config.get('model', 'lm') lm_rescore = config.get('model', 'lm_rescore') speaker_name_width = config.getint('model', 'speaker_name_width') # Recognition configuration num_tokens = config.getint('recognition', 'num_tokens') lm_scale = config.getfloat('recognition', 'lm_scale') beam = config.getfloat('recognition', 'beam') end_beam = config.getfloat('recognition', 'end_beam') if end_beam < 0: end_beam = (beam * 2.0) / 3.0 max_pruning = config.getint('recognition', 'max_pruning') max_adap_sentences = config.getint('recognition', 'max_adap_sentences') if max_adap_sentences < 0: max_adap_sentences = None num_regtree_nodes_personal = 32
from ConfigParser import SafeConfigParser import os import sys config = SafeConfigParser( dict(output_tag="", max_data_samples=None, scan_gal_input_map=None, bin_filtered="False")) if len(sys.argv) != 2: l.error("Usage: mpirun -np 3 python dst.py ch6_256.cfg") config.read(sys.argv[1]) input_filename = config.get("dst", "input_filename") mask_filename = config.get("dst", "mask_filename") sampling_frequency = config.getfloat("dst", "sampling_frequency") bin_filtered = config.getboolean("dst", "bin_filtered") nside = config.getint("dst", "nside") max_data_samples = config.get("dst", "max_data_samples") baseline_length = config.getint("dst", "baseline_length") gmres_residual = config.getfloat("dst", "gmres_residual") gmres_iterations = config.getint("dst", "gmres_iterations") scan_gal_input_map = config.get("dst", "scan_gal_input_map") output_tag = config.get("dst", "output_tag") if max_data_samples: max_data_samples = int(max_data_samples) else: max_data_samples = None folder_components = [ "dst", "out", os.path.basename(input_filename).split('.')[0] ]
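# Passing a dict to the SafeConfigParser constructor (as above) supplies
# per-option defaults: get()/getint()/getboolean() fall back to them whenever
# an option is missing from the file. A self-contained sketch of that
# behaviour; the concrete values below are illustrative only:
from ConfigParser import SafeConfigParser

parser = SafeConfigParser(dict(output_tag="", baseline_length="60"))
parser.add_section("dst")
parser.set("dst", "input_filename", "ch6_256.fits")
print(parser.get("dst", "output_tag"))          # -> '' (falls back to the default)
print(parser.getint("dst", "baseline_length"))  # -> 60 (typed getters also see defaults)
print(parser.get("dst", "input_filename"))      # -> 'ch6_256.fits'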
class GlobalSettings(Signallable): """ Global PiTiVi settings. The settings object loads settings from three different sources: the global configuration, the local configuration file, and the environment. Modules declare which settings they wish to access by calling the addConfigOption() class method during initialization. @cvar options: A dictionary of available settings. @cvar environment: A list of the controlled environment variables. """ options = {} environment = set() defaults = {} __signals__ = {} def __init__(self, **kwargs): Signallable.__init__(self) self._config = SafeConfigParser() self._readSettingsFromGlobalConfiguration() self._readSettingsFromConfigurationFile() self._readSettingsFromEnvironmentVariables() def _readSettingsFromGlobalConfiguration(self): # ideally, this should read settings from GConf for ex pass def _readSettingsFromConfigurationFile(self): # This reads the configuration from the user configuration file try: pitivi_path = self.get_local_settings_path() pitivi_conf_file_path = os.path.join(pitivi_path, "pitivi.conf") self._config.read(pitivi_conf_file_path) except ParsingError: return for (section, attrname, typ, key, env, value) in self.iterAllOptions(): if not self._config.has_section(section): continue if key and self._config.has_option(section, key): if typ == int or typ == long: # WARNING/FIXME : This try/except is for a small cockup in previous # configurations where we stored a float value... but declared it # as an integer. try: value = self._config.getint(section, key) except ValueError: value = int(self._config.getfloat(section, key)) elif typ == float: value = self._config.getfloat(section, key) elif typ == bool: value = self._config.getboolean(section, key) else: value = self._config.get(section, key) setattr(self, attrname, value) def _readSettingsFromEnvironmentVariables(self): for (section, attrname, typ, key, env, value) in self.iterAllOptions(): var = get_env_by_type(typ, env) if var is not None: setattr(self, attrname, var) def _writeSettingsToConfigurationFile(self): pitivi_path = self.get_local_settings_path() pitivi_conf_file_path = os.path.join(pitivi_path, "pitivi.conf") for (section, attrname, typ, key, env_var, value) in self.iterAllOptions(): if not self._config.has_section(section): self._config.add_section(section) if key: if value is not None: self._config.set(section, key, str(value)) else: self._config.remove_option(section, key) try: file = open(pitivi_conf_file_path, 'w') except (IOError, OSError): return self._config.write(file) file.close()