def __init__(self):
    parser = Parser()
    parser.read('configuration.ini')
    # Instantiate the main variables of our program and the models with the
    # path of our DATABASE (that way we keep it in just one place).
    self._channels = {'voice': parser.getboolean('notifications', 'voice'),
                      'text': parser.getboolean('notifications', 'text')}
    self.nfc_sensor = NFCSensor()
    self.th_sensor = THSensor()
    self.notify = Notify(self._channels)
    self.serial_number = self._get_serial_number()
    self.current_sensor = None
    self._prepare_threads()
    # Creating the database in Flask is heavy processing, and promises are
    # out of scope for this task, so we simply put the program to sleep
    # until the web_service has been instantiated.
    time.sleep(1)
    self._prepare_sensor()
    self._main_loop()
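
# A minimal sketch of the 'configuration.ini' the constructor above could
# read, assuming only the [notifications] section it touches; the file name
# and keys come from the snippet, the values are hypothetical.
from ConfigParser import SafeConfigParser
from StringIO import StringIO

SAMPLE_INI = """
[notifications]
voice = true
text = false
"""

parser = SafeConfigParser()
parser.readfp(StringIO(SAMPLE_INI))
channels = {'voice': parser.getboolean('notifications', 'voice'),
            'text': parser.getboolean('notifications', 'text')}
print(channels)  # {'voice': True, 'text': False}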
def __init__(S):
    S.displaylock = Lock()
    # S.auto_update_trade = True
    S.auto_update_depth = True

    # parse config file
    parser = SafeConfigParser()
    parser.read('traidor.conf')

    S.exchange = None
    S.exchanges = []
    exchange = MtGox(S, parser)
    S.addExchange(exchange)

    S.donated = parser.getboolean('main', 'donated')
    S.debug = parser.getboolean('main', 'debug')
    S.continue_on_exception = parser.getboolean('main', 'continue_on_exception')
    S.auto_update_depth = parser.getboolean('main', 'auto_update_depth')
    S.display_height = int(parser.get('main', 'initial_depth_display_height'))
    S.autoexec = parser.get('main', 'autoexec')

    #lines = os.environ['LINES']
    #print "lines: ", lines

    S.bots = list()

    # start command mainloop
    t = Thread(target=S)
    t.start()
class CustomModule(Thread):
    def __init__(self, package):
        super(CustomModule, self).__init__()
        self.pkg = package
        self.conf = SafeConfigParser()
        self.conf.read('config/malzoo.conf')

    def log(self, data):
        if self.conf.getboolean('settings', 'debug'):
            dbg_logger(data)
        return

    def share_data(self, data):
        mongodb = MongoDatabase()
        if self.conf.getboolean('splunk', 'enabled'):
            splunkie(data)
        if self.conf.getboolean('mongo', 'enabled'):
            mongodb.add_sample(data)
        if self.conf.getboolean('elasticsearch', 'enabled'):
            elastic(data)
        if self.conf.getboolean('settings', 'textlog'):
            txtlog(data)

    def run(self):
        pass
def __init__(self, config_path):
    parser = SafeConfigParser()
    parser.read(config_path)
    # loading kancolle browser name
    self.browser = parser.get('system', 'KANCOLLE_BROWSER')
    # loading sleep time for each round (seconds)
    self.sleep_time = parser.getint('system', 'WAIT_TIME_SECOND')
    # loading dock number for repairing
    self.docker_num = parser.getint('fleet', 'BATHROOM_NUM')
    # loading enable settings
    self.fight_enabled = parser.getboolean('enable', 'fight')
    self.dismantling_enabled = parser.getboolean('enable', 'dismantling')
    self.quest_enabled = parser.getboolean('enable', 'quest')
    self.expedition_enabled = parser.getboolean('enable', 'expedition')
    self.docking_enabled = parser.getboolean('enable', 'docking')
    # loading fight fleets
    self.fight_fleets = []
    for fleet_num, fight_world in self.__get_section_dict(parser, "fight").items():
        self.fight_fleets.append(Fleet(int(fleet_num)))
    # loading expedition fleets and expedition numbers
    self.expedition_fleets = []
    self.expeditions = []
    for fleet_num, expedition_num in self.__get_section_dict(parser, "expedition").items():
        self.expedition_fleets.append(Fleet(int(fleet_num)))
        self.expeditions.append(Expedition(int(expedition_num)))
    # loading quests
    self.quests_list = []
    for type, ids_raw_str in self.__get_section_dict(parser, "quests").items():
        id_list = [id.strip() for id in ids_raw_str.split(',')]
        self.quests_list.append(Quests(type, id_list))
def __init__(self): """parameters from parameters.ini""" parser = SafeConfigParser() parser.read(configfile) self.mog2History = parser.getint('PeopleCounting', 'mog2History') self.mog2VarThrsh = parser.getint('PeopleCounting', 'mog2VarThrsh') self.mog2Shadow = parser.getboolean('PeopleCounting', 'mog2Shadow') self.mog2LearningRate = parser.getfloat('PeopleCounting', 'mog2LearningRate') self.kernelSize = parser.getint('PeopleCounting', 'kernelSize') self.scale = parser.getfloat('PeopleCounting', 'scale') self.areaThreshold = math.pi * parser.getfloat('PeopleCounting', 'areaRadius')**2 self.peopleBlobSize = parser.getint('PeopleCounting', 'peopleBlobSize') self.distThreshold = parser.getint('PeopleCounting', 'distThreshold') self.countingRegion = map(int, parser.get('PeopleCounting', 'countingRegion').split(',')) self.upperTrackingRegion = map(int, parser.get('PeopleCounting', 'upperTrackingRegion').split(',')) self.lowerTrackingRegion = map(int, parser.get('PeopleCounting', 'lowerTrackingRegion').split(',')) self.inactiveThreshold = parser.getint('PeopleCounting', 'inactiveThreshold') # self.singlePersonBlobSize = parser.getint('PeopleCounting', 'singlePersonBlobSize') self.Debug = parser.getboolean('PeopleCounting', 'Debug') self.Visualize = parser.getboolean('PeopleCounting', 'Visualize') or self.Debug self.useRatioCriteria = parser.getboolean('PeopleCounting', 'useRatioCriteria') self.RTSPurl = parser.get('PeopleCounting','RTSPurl') self.RTSPframerate = parser.getint('PeopleCounting','RTSPframerate') """ASSUMPTION: ppl entering door walk downards(direction = 1) in the video""" self.store_id = parser.getint('store', 'store_id') self.camera_id = parser.getint('store', 'camera_id') self.ipc_username = parser.get('store', 'ipc_username') self.ipc_password = parser.get('store', 'ipc_password') self.wl_dev_cam_id = parser.get('store', 'wl_dev_cam_id')
def parse_config_file(path):
    """Parse INI files containing IMAP connection details.

    Used by livetest.py and interact.py
    """
    parser = SafeConfigParser(dict(ssl='false',
                                   username=None,
                                   password=None,
                                   oauth='false',
                                   oauth_url=None,
                                   oauth_token=None,
                                   oauth_token_secret=None))
    fh = file(path)
    parser.readfp(fh)
    fh.close()

    section = 'main'
    assert parser.sections() == [section], 'Only expected a [main] section'

    try:
        port = parser.getint(section, 'port')
    except NoOptionError:
        port = None

    return Bunch(
        host=parser.get(section, 'host'),
        port=port,
        ssl=parser.getboolean(section, 'ssl'),
        username=parser.get(section, 'username'),
        password=parser.get(section, 'password'),
        oauth=parser.getboolean(section, 'oauth'),
        oauth_url=parser.get(section, 'oauth_url'),
        oauth_token=parser.get(section, 'oauth_token'),
        oauth_token_secret=parser.get(section, 'oauth_token_secret'),
    )
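
# A self-contained sketch of how the defaults dict above behaves: options
# missing from [main] fall back to the constructor defaults, while 'port'
# has no default and so raises NoOptionError. The INI content is hypothetical.
from ConfigParser import SafeConfigParser, NoOptionError
from StringIO import StringIO

parser = SafeConfigParser(dict(ssl='false', username=None, password=None))
parser.readfp(StringIO("[main]\nhost = imap.example.com\n"))

print(parser.get('main', 'host'))        # imap.example.com
print(parser.getboolean('main', 'ssl'))  # False, supplied by the defaults dict
try:
    parser.getint('main', 'port')
except NoOptionError:
    print('port not configured')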
def send_mail(etc=""): open_ports = get_ports() ports = pickle.load(open("tcp_ports", "rb")) text = """ Open Ports:<br><br> <table cellspacing="15"> <tr> <th>Port</th> <th>Service</th> </tr> """ for p in open_ports: text += "<tr><td>%s</td><td>%s</td></tr>" % (p, lsofi(p)) parser = SafeConfigParser() parser.read("./stats.conf") msg = MIMEMultipart('related') msg['Subject'] = "Traffic report from %s" % (socket.getfqdn()) msg['From'] = parser.get('email', 'from') msg['To'] = parser.get('email', 'to') msg.preamble = 'This is a multi-part message in MIME format.' body = """ %s<br><br> <img src="cid:graph_packets"><br><br> <img src="cid:graph_conns"><br><br> <img src="cid:graph_bandwidth"><br><br>%s</table>""" % (etc, text) msgBody = MIMEText(body, 'html') msg.attach(msgBody) attachments = [ ('packets.png', 'graph_packets'), ('conns.png', 'graph_conns'), ('bps.png', 'graph_bandwidth') ] for attachment in attachments: fp = open(attachment[0], 'rb') img = MIMEImage(fp.read()) img.add_header('Content-ID', attachment[1]) fp.close() msg.attach(img) s = smtplib.SMTP(parser.get('email', 'smtp_server'), parser.getint('email', 'port')) if parser.getboolean('email', 'auth'): s.ehlo() if parser.getboolean('email', 'use_tls'): s.starttls() s.ehlo() if parser.getboolean('email', 'auth'): s.login(parser.get('email', 'username'), parser.get('email', 'password')) s.sendmail(parser.get('email', 'from'), [parser.get('email', 'to')], msg.as_string()) s.quit()
def read_settings(self, filename):
    settings_config = SafeConfigParser()
    settings_config.read(filename)
    try:
        # Download section
        section = "download"
        download_data = {}
        download_data["default-folder"] = settings_config.get(section, "default-folder")
        download_data["no-dialog"] = settings_config.getboolean(section, "no-dialog")
        # Authentication section
        section = "authentication"
        authentication_data = {}
        authentication_data["store-auth"] = settings_config.getboolean(section, "store-auth")
        # Automated sync section
        section = "automated-sync"
        automate_sync_data = {}
        automate_sync_data["enabled"] = settings_config.getboolean(section, "enabled")
        automate_sync_data["only-sync-on-wlan"] = settings_config.getboolean(section, "only-sync-on-wlan")
        automate_sync_data["update-interval"] = settings_config.getint(section, "update-interval")
        automate_sync_data["sync-path"] = settings_config.get(section, "sync-path")
        self.set_current_settings(download_data, authentication_data, automate_sync_data)
    except NoSectionError:
        self.write_default_settings()
    except NoOptionError, e:
        print "DropN900 config is missing a setting:", e
def load_settings(self):
    self.default_settings()
    if not os.path.isfile(self.config_file_name):
        return
    config = SafeConfigParser()
    config.read(self.config_file_name)
    self.input_directory = config.get('main', 'input_directory').decode('utf-8')
    self.output_directory = config.get('main', 'output_directory').decode('utf-8')
    self.video_width = config.getint('main', 'video_width')
    self.video_height = config.getint('main', 'video_height')
    self.shift_start = config.getfloat('main', 'pad_start')
    self.shift_end = config.getfloat('main', 'pad_end')
    self.time_delta = config.getfloat('main', 'gap_between_phrases')
    self.is_split_long_phrases = config.getboolean('main', 'is_split_long_phrases')
    self.phrases_duration_limit = config.getint('main', 'phrases_duration_limit')
    self.mode = config.get('main', 'mode')
    self.is_write_output_subtitles = config.getboolean('main', 'is_write_output_subtitles')
    self.is_ignore_sdh_subtitle = config.getboolean('main', 'is_ignore_sdh_subtitle')
    self.is_add_dir_to_media_path = config.getboolean('main', 'is_add_dir_to_media_path')
    value = [e.strip() for e in config.get('main', 'recent_deck_names').decode('utf-8').split(',')]
    if len(value) != 0:
        self.recent_deck_names.extendleft(value)
class ADBFuzzConfig:
    def __init__(self, cfgFile):
        cfgDefaults = {}
        cfgDefaults["id"] = None
        cfgDefaults["remoteHost"] = None
        cfgDefaults["localPort"] = "8088"
        cfgDefaults["useWebSockets"] = False
        cfgDefaults["localWebSocketPort"] = "8089"
        cfgDefaults["localListenPort"] = "8090"
        cfgDefaults["localPortOffset"] = "0"
        cfgDefaults["libDir"] = None
        cfgDefaults["knownPath"] = None
        cfgDefaults["debug"] = str(False)
        cfgDefaults["useMail"] = str(False)
        cfgDefaults["runTimeout"] = str(5)
        cfgDefaults["maxLogSize"] = str(1024 * 1024)  # Default to 1 MB maximum log
        cfgDefaults["addon"] = None

        self.cfg = SafeConfigParser(cfgDefaults)
        if len(self.cfg.read(cfgFile)) == 0:
            raise Exception("Unable to read configuration file: " + cfgFile)

        self.id = self.cfg.get("main", "id")
        self.fuzzerFile = self.cfg.get("main", "fuzzer")
        self.runTimeout = self.cfg.getint("main", "runTimeout")
        self.maxLogSize = self.cfg.getint("main", "maxLogSize")
        self.remoteAddr = self.cfg.get("main", "remoteHost")
        self.localAddr = self.cfg.get("main", "localHost")
        self.localPort = self.cfg.get("main", "localPort")
        self.debug = self.cfg.getboolean("main", "debug")
        self.knownPath = self.cfg.get("main", "knownPath")
        self.addon = self.cfg.get("main", "addon")
        self.useWebSockets = self.cfg.getboolean("main", "useWebSockets")
        self.localWebSocketPort = self.cfg.get("main", "localWebSocketPort")
        self.localListenPort = self.cfg.get("main", "localListenPort")
        self.localPortOffset = self.cfg.get("main", "localPortOffset")
        self.libDir = self.cfg.get("main", "libDir")

        # Mail configuration
        self.useMail = self.cfg.getboolean("main", "useMail")
        if self.useMail:
            self.mailFrom = self.cfg.get("main", "mailFrom")
            self.mailTo = self.cfg.get("main", "mailTo")
            self.SMTPHost = self.cfg.get("main", "SMTPHost")

        # Add our port offset to all local ports
        portOffset = int(self.localPortOffset)
        self.localPort = str(int(self.localPort) + portOffset)
        self.localWebSocketPort = str(int(self.localWebSocketPort) + portOffset)
        self.localListenPort = str(int(self.localListenPort) + portOffset)
def parseConfig():
    configParser = SafeConfigParser()
    configParser.read('MyoConfig.ini')
    global consoleOutput, fileOutput, dbOutput, dataDir, dbUsername, dbPassword
    consoleOutput = configParser.getboolean('OutputOptions', 'Console')
    fileOutput = configParser.getboolean('OutputOptions', 'Files')
    dbOutput = configParser.getboolean('OutputOptions', 'DB')
    dataDir = configParser.get('OutputOptions', 'DataDir')
    dbUsername = configParser.get('DBLogin', 'Username')
    dbPassword = configParser.get('DBLogin', 'Password')
def __init__(self, extra_args=None, description=None, example=None):
    self.logger = logging.getLogger('fts3')

    # Common CLI options
    usage = None
    if extra_args:
        usage = "usage: %prog [options] " + extra_args

    config = SafeConfigParser(defaults=CONFIG_DEFAULTS)
    section = CONFIG_DEFAULTSECTION
    config.read(CONFIG_FILENAMES)

    # manually set the section in edge cases
    if not config.has_section('Main'):
        section = 'DEFAULT'

    # manually get values for which we need to support None
    opt_endpoint = config.get(section, 'endpoint')
    if opt_endpoint == 'None':
        opt_endpoint = None
    opt_ukey = config.get(section, 'ukey')
    if opt_ukey == 'None':
        opt_ukey = None
    opt_ucert = config.get(section, 'ucert')
    if opt_ucert == 'None':
        opt_ucert = None

    if sys.version_info < (2, 6):
        self.opt_parser = OptionParser(usage=usage, description=description,
                                       formatter=_Formatter())
    else:
        self.opt_parser = OptionParser(usage=usage, description=description,
                                       epilog=example, formatter=_Formatter())

    self.opt_parser.add_option('-v', '--verbose', dest='verbose', action='store_true',
                               help='verbose output.',
                               default=config.getboolean(section, 'verbose'))
    self.opt_parser.add_option('-s', '--endpoint', dest='endpoint',
                               help='FTS3 REST endpoint.', default=opt_endpoint)
    self.opt_parser.add_option('-j', '--json', dest='json', action='store_true',
                               help='print the output in JSON format.',
                               default=config.getboolean(section, 'json'))
    self.opt_parser.add_option('--key', dest='ukey',
                               help='the user certificate private key.', default=opt_ukey)
    self.opt_parser.add_option('--cert', dest='ucert',
                               help='the user certificate.', default=opt_ucert)
    self.opt_parser.add_option('--insecure', dest='verify', default=True,
                               action='store_false',
                               help='do not validate the server certificate')
    self.opt_parser.add_option('--access-token', dest='access_token',
                               help='OAuth2 access token (supported only by some endpoints, takes precedence)',
                               default=None)
def parse_config_file(path):
    """Parse INI files containing IMAP connection details.

    Used by livetest.py and interact.py
    """
    parser = SafeConfigParser(dict(
        username=None,
        password=None,
        ssl='false',
        stream='false',
        oauth='false',
        oauth_token=None,
        oauth_token_secret=None,
        oauth_url=None,
        oauth2='false',
        oauth2_client_id=None,
        oauth2_client_secret=None,
        oauth2_refresh_token=None,
    ))
    with open(path, 'r') as fh:
        parser.readfp(fh)

    section = 'main'
    assert parser.sections() == [section], 'Only expected a [main] section'

    try:
        port = parser.getint(section, 'port')
    except NoOptionError:
        port = None

    return Bunch(
        host=parser.get(section, 'host'),
        port=port,
        ssl=parser.getboolean(section, 'ssl'),
        stream=parser.getboolean(section, 'stream'),
        username=parser.get(section, 'username'),
        password=parser.get(section, 'password'),
        oauth=parser.getboolean(section, 'oauth'),
        oauth_url=parser.get(section, 'oauth_url'),
        oauth_token=parser.get(section, 'oauth_token'),
        oauth_token_secret=parser.get(section, 'oauth_token_secret'),
        oauth2=parser.getboolean(section, 'oauth2'),
        oauth2_client_id=parser.get(section, 'oauth2_client_id'),
        oauth2_client_secret=parser.get(section, 'oauth2_client_secret'),
        oauth2_refresh_token=parser.get(section, 'oauth2_refresh_token'),
    )
def __get_configs(region):
    cloudmonkey_config = os.path.expanduser('~/.cloudmonkey/config')
    if os.path.isfile(cloudmonkey_config):
        app.logger.debug("Loading configs from cloudmonkey/config")
        parser = SafeConfigParser()
        parser.read(cloudmonkey_config)
        if parser.has_section(region):
            apikey = parser.get(region, 'apikey')
            api_url = parser.get(region, 'url')
            secretkey = parser.get(region, 'secretkey')
            verifysslcert = parser.getboolean(region, 'verifysslcert')
        else:
            raise EnvironmentError("Cloudmonkey config does not have the region " + region)
    else:
        region = region.upper()
        app.logger.debug("Loading from env variables: " + region)
        apikey = os.getenv(region + '_APIKEY', '')
        secretkey = os.getenv(region + '_SECRETKEY', '')
        api_url = os.getenv(region + '_URL', '')
        verifysslcert = os.getenv(region + '_VERIFYSSLCERT', '').upper() == 'TRUE'
        if apikey == '' or secretkey == '' or api_url == '':
            app.logger.exception("Variable values for region " + region +
                                 " SIZE APIKEY: " + str(len(apikey)) +
                                 ", SIZE SECRETKEY: " + str(len(secretkey)) +
                                 ", URL: " + api_url +
                                 ", VERIFYSSLCERT: " + str(verifysslcert))
            raise EnvironmentError("Should define env variables for region: {0} "
                                   "( {0}_APIKEY, {0}_SECRETKEY, {0}_URL, {0}_VERIFYSSLCERT )".format(region))
    return {
        "apikey": apikey,
        "api_url": api_url,
        "secretkey": secretkey,
        "verifysslcert": verifysslcert
    }
def get_all(cls, return_dict=False):
    items = OrderedDict()
    try:
        wp = WikiPage.get(*cls._get_wiki_config())
    except NotFound:
        return items if return_dict else items.values()

    wp_content = StringIO(wp.content)
    cfg = SafeConfigParser(allow_no_value=True)
    cfg.readfp(wp_content)

    for section in cfg.sections():
        def_values = {'id': section}
        for name, value in cfg.items(section):
            # coerce boolean variables
            if name in cls._bool_values:
                def_values[name] = cfg.getboolean(section, name)
            else:
                def_values[name] = value
        try:
            item = cls(**def_values)
        except TypeError:
            # a required variable wasn't set for this item, skip
            continue
        if item.is_enabled:
            items[section] = item
    return items if return_dict else items.values()
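
# A self-contained sketch of the pattern above: parse INI text from memory
# with allow_no_value=True, walk every section, and coerce a known set of
# boolean options via getboolean(). Section and option names are made up.
from ConfigParser import SafeConfigParser
from StringIO import StringIO

BOOL_VALUES = ('enabled',)
content = StringIO("[widget-a]\nenabled = yes\ncolor = red\n"
                   "[widget-b]\nenabled = no\ncolor = blue\n")

cfg = SafeConfigParser(allow_no_value=True)
cfg.readfp(content)

items = {}
for section in cfg.sections():
    values = {'id': section}
    for name, value in cfg.items(section):
        values[name] = cfg.getboolean(section, name) if name in BOOL_VALUES else value
    items[section] = values

print(items['widget-a']['enabled'])  # True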
def loadConfig(self):
    pyec2_conf = ['%s/.pyec2' % HOME, '/etc/pyec2.cfg', 'pyec2.cfg.sample']
    for pyec2 in pyec2_conf:
        try:
            with open(pyec2) as f:
                parser = SafeConfigParser()
                parser.read(pyec2)
                conf = dict()
                conf['pyec2'] = dict()
                conf['pyec2']['key_dir'] = parser.get('pyec2', 'key_dir')
                conf['pyec2']['key_extension'] = parser.get('pyec2', 'key_extension')
                try:
                    conf['pyec2']['custom_key_file'] = parser.get('pyec2', 'custom_key_file')
                except Exception:
                    pass
                conf['pyec2']['log_level'] = parser.get('pyec2', 'log_level')
                conf['pyec2']['add_to_known_hosts'] = parser.getboolean('pyec2', 'add_to_known_hosts')
                conf['pyec2']['prepend_file'] = parser.get('pyec2', 'prepend_file')
                conf['aws'] = dict()
                conf['aws']['usernames'] = parser.get('aws', 'usernames').split(',')
                conf['aws']['aws_access_key_id'] = parser.get('aws', 'aws_access_key_id')
                conf['aws']['aws_secret_access_key'] = parser.get('aws', 'aws_secret_access_key')
                conf['aws']['ec2_region'] = parser.get('aws', 'ec2_region')
                conf['aws']['name_tag'] = parser.get('aws', 'name_tag')
                return conf
        except IOError, e:
            print e
        except NoSectionError, e:
            print 'It seems that something is wrong with your configuration, please create a new one now.'
            self.newConfig()
def config_init():
    sys.path.append(BASEDIR)
    module = sys.modules[__name__]

    parser = SafeConfigParser()
    files = parser.read(['utuputki.conf',
                         os.path.expanduser('~/.utuputki.conf'),
                         '/etc/utuputki.conf'])
    for m_file in files:
        print("Config: Read {} !".format(m_file))

    # Read configs and get either the new value or the default
    settings = {}
    for key in config_values:
        if parser.has_option('utuputki', key):
            if type(getattr(module, key)) is int:
                settings[key] = parser.getint('utuputki', key)
            elif type(getattr(module, key)) is bool:
                settings[key] = parser.getboolean('utuputki', key)
            else:
                settings[key] = parser.get('utuputki', key)
        else:
            settings[key] = getattr(module, key)

    # Put configs into globals
    for name, value in settings.iteritems():
        setattr(module, name, value)
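
# A minimal sketch of the type-driven lookup used above (and again in the
# nodeenv and loadSettings snippets further down): the type of the current
# default decides which parser accessor is used. All names here are made up.
from ConfigParser import SafeConfigParser
from StringIO import StringIO

defaults = {'port': 8080, 'debug': False, 'title': 'demo'}
parser = SafeConfigParser()
parser.readfp(StringIO("[app]\nport = 9090\ndebug = yes\n"))

settings = {}
for key, default in defaults.items():
    if not parser.has_option('app', key):
        settings[key] = default
    elif isinstance(default, bool):  # check bool before int: bool is an int subclass
        settings[key] = parser.getboolean('app', key)
    elif isinstance(default, int):
        settings[key] = parser.getint('app', key)
    else:
        settings[key] = parser.get('app', key)

print(settings)  # port=9090, debug=True, title='demo'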
def getboolean(self, section, option, default=None):
    try:
        return SafeConfigParser.getboolean(self, section, option)
    except (NoSectionError, NoOptionError), err:
        if default is not None:
            return default
        raise err
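
# A self-contained sketch of how an override like the one above behaves:
# missing sections or options return the supplied default instead of
# raising. The DefaultingParser class is hypothetical demo scaffolding.
from ConfigParser import SafeConfigParser, NoSectionError, NoOptionError
from StringIO import StringIO

class DefaultingParser(SafeConfigParser):
    def getboolean(self, section, option, default=None):
        try:
            return SafeConfigParser.getboolean(self, section, option)
        except (NoSectionError, NoOptionError):
            if default is not None:
                return default
            raise

parser = DefaultingParser()
parser.readfp(StringIO("[main]\nverbose = on\n"))
print(parser.getboolean('main', 'verbose'))                 # True
print(parser.getboolean('main', 'missing', default=False))  # False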
def __init__(self, config=None, opts=None):
    # not enough info to execute
    if config is None and opts is None:
        print "Please specify command option or config file ..."
        return

    # config parser
    parser = SafeConfigParser()
    parser.read(config)

    self.debug = parser.getboolean('knn', 'debug')
    self.seed = parser.getint('knn', 'random_seed')
    self.obj = 'dt'
    self.trainfile = parser.get('knn', 'train')
    self.validfile = parser.get('knn', 'valid')
    self.testfile = parser.get('knn', 'test')
    self.vocabfile = parser.get('knn', 'vocab')
    self.domain = parser.get('knn', 'domain')
    self.percentage = float(parser.getfloat('knn', 'percentage')) / 100.0

    # generation-specific parameters
    self.topk = parser.getint('knn', 'topk')
    self.detectpairs = parser.get('knn', 'detectpairs')
    self.verbose = parser.getint('knn', 'verbose')

    # set random seed
    np.random.seed(self.seed)
    random.seed(self.seed)
    np.set_printoptions(precision=4)

    # set up data reader, processors, and lexicon
    self.setupSideOperators()
def load(self, irc_client):
    # Find each plugin to load by the entries in the conf dir
    conf_files = [f for f in os.listdir(self.conf_dir) if f.endswith('.conf')]

    all_listeners = []
    for conf_file in conf_files:
        plugin_name = conf_file.split('.')[0]
        full_path = os.path.join(self.conf_dir, conf_file)

        plugin_conf = SafeConfigParser()
        plugin_conf.read([self.bot_config_filename, full_path])

        if not plugin_conf.has_section('plugin') or not plugin_conf.has_option('plugin', 'enabled'):
            LOG.warning('Could not find enabled property for plugin configuration [%s]' % full_path)
            continue

        if not plugin_conf.getboolean('plugin', 'enabled'):
            LOG.info('Skipping disabled plugin [%s]' % plugin_name)
            continue

        try:
            plugin_module = self._load_module(plugin_name)
            listeners = plugin_module.init_plugin(plugin_conf, irc_client)
            all_listeners += listeners
        except Exception:
            LOG.exception('Error initializing plugin [%s]' % plugin_name)
            continue

    return all_listeners
def run():
    """Get the arguments and parse the config file. Activate console.

    Get servers from the config file or from arguments. Show the query screen.
    """
    arguments = parseArguments()
    config = SafeConfigParser({'username': arguments.username,
                               'password': arguments.password})
    config.read(arguments.conf)
    servers = commonServers(config, arguments)

    chosenServers = {}
    for choice in choices:
        if config.sections():
            chosenServers[choice] = []
            for server in servers:
                if not config.has_option(str(server), choice) or config.getboolean(str(server), choice):
                    chosenServers[choice].append(server)
        else:
            chosenServers[choice] = servers

    # Global config params
    params = {}
    if config.has_option('DEFAULT', 'ignoreDbs'):
        params['ignoreDbs'] = config.get('DEFAULT', 'ignoreDbs').split(',')
    if config.has_option('DEFAULT', 'minSecsRunning'):
        params['minSecsRunning'] = int(config.get('DEFAULT', 'minSecsRunning'))

    with Console() as console:
        queryScreen = QueryScreen(console, chosenServers,
                                  autoKillSeconds=arguments.autoKillSeconds,
                                  params=params)
        try:
            queryScreen.action()
        except KeyboardInterrupt:
            pass
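
# A small sketch of the opt-out pattern above: a feature is considered
# enabled for a server unless its section explicitly sets it to false.
# Section and option names are invented for the demo.
from ConfigParser import SafeConfigParser
from StringIO import StringIO

config = SafeConfigParser()
config.readfp(StringIO("[server1]\nkill = false\n[server2]\nhost = db2\n"))

for server in ('server1', 'server2'):
    enabled = (not config.has_option(server, 'kill')
               or config.getboolean(server, 'kill'))
    print("%s: %s" % (server, enabled))  # server1: False, server2: True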
def main(argv=sys.argv[1:]):
    imagename = argv[0]
    try:
        s = SafeConfigParser()
        s3cfg = os.getenv("HOME") + "/.s3cfg"
        s.readfp(open(s3cfg, "r"))
        s3id = s.get("default", "access_key")
        pw = s.get("default", "secret_key")
        host_base = s.get("default", "host_base")
        use_https = s.getboolean("default", "use_https")
        hba = host_base.split(":", 1)
        if len(hba) == 2:
            port = int(hba[1])
        else:
            port = 8888
        host = hba[0]
    except Exception, ex:
        print "This program uses the s3cmd configuration file ~/.s3cfg"
        print ex
        sys.exit(1)

    print "getting connection"
    ec2conn = EC2Connection(s3id, pw, host='localhost', port=8444, debug=2)
    ec2conn.host = 'localhost'
    print "getting image"
    image = ec2conn.get_image(imagename)
    print "running"
    res = image.run(min_count=2, max_count=4)
    res.stop_all()
def parse_config(conf_file):
    '''Returns a dict with all global config data'''
    defaults = {
        'log_level': 'INFO',
        'max_logs': '5',
        'skip_filetypes': '',
        'exclude': [],
    }
    parser = SafeConfigParser(defaults=defaults)
    parser.read(conf_file)

    conf = {}
    conf['dho_access_key'] = parser.get('settings', 'access_key', raw=True)
    conf['dho_secret_key'] = parser.get('settings', 'secret_key', raw=True)
    conf['passphrase'] = parser.get('settings', 'passphrase', raw=True)
    conf['log_level'] = parser.get('settings', 'log_level')
    conf['log_file'] = parser.get('settings', 'log_file')
    conf['max_logs'] = parser.getint('settings', 'max_logs')

    conf['backup_zones'] = []
    for section in parser.sections():
        if section != 'settings':
            conf['backup_zones'].append({
                'directory': parser.get(section, 'directory'),
                'bucket': parser.get(section, 'bucket'),
                'encrypt': parser.getboolean(section, 'encrypt'),
                'exclude': map(parse_excludes,
                               (ex.strip() for ex in parser.get(section, 'exclude').split(','))),
            })
    return conf
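
# A compact sketch of the sections-as-records idea above: one reserved
# [settings] section for globals, every other section describing one backup
# zone. The INI content and key names here are illustrative only.
from ConfigParser import SafeConfigParser
from StringIO import StringIO

parser = SafeConfigParser()
parser.readfp(StringIO(
    "[settings]\nlog_level = INFO\n"
    "[home]\ndirectory = /home/user\nencrypt = yes\n"
    "[etc]\ndirectory = /etc\nencrypt = no\n"))

zones = [{'name': s,
          'directory': parser.get(s, 'directory'),
          'encrypt': parser.getboolean(s, 'encrypt')}
         for s in parser.sections() if s != 'settings']
print(len(zones))  # 2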
class Config:
    def __init__(self):
        self.log = Logger()
        try:
            self.conf = SafeConfigParser()
            self.file = "unrealpsd.conf"
            self.conf.read(self.file)
        except:
            self.log.output("Error loading configuration file!")

    def getOption(self, section, option):
        try:
            return self.conf.get(section, option)
        except:
            self.log.output("Can't read configuration option!")

    def getInt(self, section, option):
        try:
            return self.conf.getint(section, option)
        except:
            self.log.output("Can't read configuration option!")

    def getBoolean(self, section, option):
        try:
            return self.conf.getboolean(section, option)
        except:
            self.log.output("Can't read configuration option!")

    def hasOption(self, section, option):
        return self.conf.has_option(section, option)
def _load(cls, configfiles, verbose=False):
    """
    Load configuration from the given files in reverse order,
    if they exist and have a [nodeenv] section.
    """
    for configfile in reversed(configfiles):
        configfile = os.path.expanduser(configfile)
        if not os.path.exists(configfile):
            continue

        ini_file = ConfigParser()
        ini_file.read(configfile)
        section = "nodeenv"
        if not ini_file.has_section(section):
            continue

        for attr, val in iteritems(vars(cls)):
            if attr.startswith('_') or not ini_file.has_option(section, attr):
                continue
            if isinstance(val, bool):
                val = ini_file.getboolean(section, attr)
            else:
                val = ini_file.get(section, attr)
            if verbose:
                print('CONFIG {0}: {1} = {2}'.format(
                    os.path.basename(configfile), attr, val))
            setattr(cls, attr, val)
def merge_configfile(self):
    if self.pass_args:
        args = self.main_parser.parse_args(self.pass_args)
    else:
        args = self.main_parser.parse_args()

    # load config files with settings and
    # put them into a dict with "section_option" keys
    merged_defaults = DEFAULTS.copy()
    if hasattr(args, 'config') and args.config:
        config = SafeConfigParser()
        config.readfp(codecs.open(args.config, "r", "utf8"))
        if not config:
            print("Can't open file '{}'".format(args.config))
            sys.exit(0)
        else:
            for section in CONF_SECTIONS:
                conf = {}
                if config.has_section(section):
                    for k, v in config.items(section):
                        prop = '{}_{}'.format(section, k)
                        if prop in BOOLEAN_TYPES:
                            conf[prop] = config.getboolean(section, k)
                        else:
                            conf[prop] = config.get(section, k)
                merged_defaults.update(conf)
    self.merged_defaults = merged_defaults
def main():
    from eventlet import wsgi, wrap_ssl, listen
    from ankisyncd.thread import shutdown

    config = SafeConfigParser()
    config.read("ankisyncd.conf")
    ankiserver = SyncApp(config)

    host = config.get("sync_app", "host")
    port = config.getint("sync_app", "port")
    if config.getboolean("sync_app", "ssl"):
        certfile = config.get("sync_app", "certfile")
        keyfile = config.get("sync_app", "keyfile")
        socket = wrap_ssl(
            listen((host, port)),
            certfile=certfile,
            keyfile=keyfile,
            server_side=True
        )
    else:
        socket = listen((host, port))

    try:
        wsgi.server(socket, ankiserver)
    finally:
        shutdown()
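
# The options read above imply a [sync_app] section shaped roughly like the
# one below; the values are illustrative guesses, not taken from ankisyncd's
# documentation.
from ConfigParser import SafeConfigParser
from StringIO import StringIO

conf = StringIO(
    "[sync_app]\n"
    "host = 127.0.0.1\n"
    "port = 27701\n"
    "ssl = false\n"
    "certfile =\n"
    "keyfile =\n")

config = SafeConfigParser()
config.readfp(conf)
print("%s:%d ssl=%s" % (config.get("sync_app", "host"),
                        config.getint("sync_app", "port"),
                        config.getboolean("sync_app", "ssl")))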
def loadSettings(settings, file, section="default"):
    '''Load settings from file.

    Keyword arguments:
    settings - default settings from parent class
    file - abs path to file
    section - section name (default = default)
    '''
    ini_parser = SafeConfigParser()
    ini_parser.read(makePath(file))
    if not section in ini_parser.sections():
        msg = "In file %s, section %s was not found." % (file, section)
        raise SettingsSectionNotFound(msg)
    for key in settings.keys():
        tmp = settings[key]
        if ini_parser.has_option(section, key):
            if type(settings[key]) == int:
                tmp = ini_parser.getint(section, key)
            elif type(settings[key]) == bool:
                tmp = ini_parser.getboolean(section, key)
            else:
                tmp = ini_parser.get(section, key)
        settings[key] = tmp
def __init__(self, args):
    '''
    Constructor
    '''
    self.git_remote_url = 'review.typo3.org'
    #self.git_remote_url = 'ssh:\/\/[email protected]:29418'
    review_host = 'review.typo3.org'
    #review_host = '-p 29418 [email protected]'
    self.ssh_cmd = 'ssh ' + review_host

    # self.forge_db_id will be set in get_check_forge_id
    self.forge_db_id = False
    # self.old_svn_path will be set somewhere below
    self.old_svn_path = False

    parser = SafeConfigParser()

    # .secret
    parser.read('.secret.cfg')
    self.forge_db = parser.get('forge', 'db')
    self.forge_user = parser.get('forge', 'user')
    self.forge_pw = parser.get('forge', 'pw')
    self.robot_user = parser.get('gerrit', 'robot_user')

    # config
    parser.read('config.cfg')
    self.interactive = parser.getboolean('config', 'interactive')
    if args.interactive_false:
        self.interactive = False
    self.debug = parser.get('config', 'debug')
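
# A short sketch of the incremental read pattern above: calling read() (or
# readfp() here, to stay in memory) twice on the same parser merges both
# files, with later files winning on conflicts. File contents are stand-ins.
from ConfigParser import SafeConfigParser
from StringIO import StringIO

parser = SafeConfigParser()
parser.readfp(StringIO("[forge]\ndb = forge_db\nuser = alice\n"))  # like .secret.cfg
parser.readfp(StringIO("[config]\ninteractive = true\n"))          # like config.cfg

print(parser.get('forge', 'user'))                 # alice
print(parser.getboolean('config', 'interactive'))  # True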
def parse(self, node, fileContents):
    """
    Parses a .pointer file whose contents will be read by the given input stream
    """
    cfg = SafeConfigParser()
    strFp = StringIO.StringIO(fileContents)
    try:
        cfg.readfp(strFp)
        assert cfg.has_section(NodeParser.NODE_SECTION), \
            "Invalid .node file, a node section hasn't been defined"

        # read node's options
        if cfg.has_option(NodeParser.NODE_SECTION, NodeParser.NODE_OPT_DESC):
            node.description = cfg.get(NodeParser.NODE_SECTION, NodeParser.NODE_OPT_DESC)
        if cfg.has_option(NodeParser.NODE_SECTION, NodeParser.NODE_OPT_CUBEMAP):
            node.cubemap = cfg.get(NodeParser.NODE_SECTION, NodeParser.NODE_OPT_CUBEMAP)
        if cfg.has_option(NodeParser.NODE_SECTION, NodeParser.NODE_OPT_IMAGE):
            node.image = cfg.get(NodeParser.NODE_SECTION, NodeParser.NODE_OPT_IMAGE)
        if cfg.has_option(NodeParser.NODE_SECTION, NodeParser.NODE_OPT_BGCOLOR):
            bgColorStr = cfg.get(NodeParser.NODE_SECTION, NodeParser.NODE_OPT_BGCOLOR)
            node.bgColor = [x.strip() for x in bgColorStr.split(',')]
        if cfg.has_option(NodeParser.NODE_SECTION, NodeParser.NODE_OPT_SCRIPT):
            node.scriptName = cfg.get(NodeParser.NODE_SECTION, NodeParser.NODE_OPT_SCRIPT)
        if cfg.has_option(NodeParser.NODE_SECTION, NodeParser.NODE_OPT_LOOKAT):
            node.lookat = cfg.get(NodeParser.NODE_SECTION, NodeParser.NODE_OPT_LOOKAT)
        if cfg.has_option(NodeParser.NODE_SECTION, NodeParser.NODE_OPT_EXTENSION):
            node.extension = cfg.get(NodeParser.NODE_SECTION, NodeParser.NODE_OPT_EXTENSION)
        if cfg.has_option(NodeParser.NODE_SECTION, NodeParser.NODE_OPT_PARENT):
            parent = cfg.get(NodeParser.NODE_SECTION, NodeParser.NODE_OPT_PARENT)
            if parent == 'render2d':
                node.parent2d = Node.PT_Render2D
            elif parent == 'aspect2d':
                node.parent2d = Node.PT_Aspect2D
            else:
                node.parent2d = None
        if cfg.has_option(NodeParser.NODE_SECTION, NodeParser.NODE_OPT_PLAYLIST):
            node.musicPlaylist = cfg.get(NodeParser.NODE_SECTION, NodeParser.NODE_OPT_PLAYLIST)
        if cfg.has_option(NodeParser.NODE_SECTION, NodeParser.NODE_OPT_HOTSPOTS_MAP):
            node.hotspotsMapFilename = cfg.get(NodeParser.NODE_SECTION, NodeParser.NODE_OPT_HOTSPOTS_MAP)

        for s in cfg.sections():
            if s.startswith('hotspot_'):
                hp = Hotspot(name=s[8:])
                if cfg.has_option(s, NodeParser.HOTSPOT_OPT_FACE):
                    face = cfg.get(s, NodeParser.HOTSPOT_OPT_FACE)
                    assert self.__facesCodes.has_key(face), 'invalid face name: ' + face
                    hp.face = self.__facesCodes[face]
                if cfg.has_option(s, NodeParser.HOTSPOT_OPT_LOOKTEXT):
                    hp.description = cfg.get(s, NodeParser.HOTSPOT_OPT_LOOKTEXT)
                if cfg.has_option(s, NodeParser.HOTSPOT_OPT_XO):
                    hp.setXo(cfg.getint(s, NodeParser.HOTSPOT_OPT_XO))
                if cfg.has_option(s, NodeParser.HOTSPOT_OPT_YO):
                    hp.setYo(cfg.getint(s, NodeParser.HOTSPOT_OPT_YO))
                if cfg.has_option(s, NodeParser.HOTSPOT_OPT_XE):
                    hp.setXe(cfg.getint(s, NodeParser.HOTSPOT_OPT_XE))
                if cfg.has_option(s, NodeParser.HOTSPOT_OPT_YE):
                    hp.setYe(cfg.getint(s, NodeParser.HOTSPOT_OPT_YE))
                if cfg.has_option(s, NodeParser.HOTSPOT_OPT_WIDTH):
                    hp.setWidth(cfg.getint(s, NodeParser.HOTSPOT_OPT_WIDTH))
                if cfg.has_option(s, NodeParser.HOTSPOT_OPT_HEIGHT):
                    hp.setHeight(cfg.getint(s, NodeParser.HOTSPOT_OPT_HEIGHT))
                if cfg.has_option(s, NodeParser.HOTSPOT_OPT_ACTION):
                    actionStr = cfg.get(s, NodeParser.HOTSPOT_OPT_ACTION)
                    argList = [x.strip() for x in actionStr.split(',')]
                    hp.action = argList.pop(0)
                    hp.actionArgs = argList
                if cfg.has_option(s, NodeParser.HOTSPOT_OPT_ACTIONARGS):
                    hp.actionArgs = cfg.get(s, NodeParser.HOTSPOT_OPT_ACTIONARGS)
                if cfg.has_option(s, NodeParser.HOTSPOT_OPT_ACTIVE):
                    hp.active = cfg.getboolean(s, NodeParser.HOTSPOT_OPT_ACTIVE)
                if cfg.has_option(s, NodeParser.HOTSPOT_OPT_CURSOR):
                    hp.cursor = cfg.get(s, NodeParser.HOTSPOT_OPT_CURSOR)
                if cfg.has_option(s, NodeParser.HOTSPOT_OPT_SPRITE):
                    hp.sprite = cfg.get(s, NodeParser.HOTSPOT_OPT_SPRITE)
                if cfg.has_option(s, NodeParser.HOTSPOT_OPT_ITEMINTERACT):
                    hp.itemInteractive = cfg.getboolean(s, NodeParser.HOTSPOT_OPT_ITEMINTERACT)
                if cfg.has_option(s, NodeParser.HOTSPOT_OPT_CLICKMASK):
                    hp.clickMask = cfg.get(s, NodeParser.HOTSPOT_OPT_CLICKMASK)
                node.addHotspot(hp)
    except (ConfigParser.MissingSectionHeaderError, ConfigParser.ParsingError):
        raise ParseException(error='error.parse.invalid', resFile=node.name + '.node')
    except IOError, e:
        raise ParseException(error='error.parse.io', resFile=node.name + '.node', args=(str(e),))
def getboolean(self, sec, key, val=None, strict=True):
    # 'val' and 'strict' are accepted for signature compatibility but are
    # not used by this implementation.
    return SafeConfigParser.getboolean(self, sec, key)
def main(args):
    config = ConfigParser({"sentences": "False"})
    config.read(args.config_file)
    corpus_filename = config.get("main", "corpus_file")
    model_path = config.get("main", "path")

    if config.getboolean("main", "sentences"):
        from vsm.extensions.ldasentences import CorpusSent as Corpus
    else:
        from vsm.corpus import Corpus

    if args.k is None:
        try:
            if config.get("main", "topics"):
                default = ' '.join(map(str, eval(config.get("main", "topics"))))
            else:
                raise NoOptionError('topics', 'main')
        except NoOptionError:
            default = ' '.join(map(str, range(20, 100, 20)))

        while args.k is None:
            ks = raw_input("Number of Topics [Default '{0}']: ".format(default))
            try:
                if ks:
                    args.k = [int(n) for n in ks.split()]
                elif not ks.strip():
                    args.k = [int(n) for n in default.split()]
                if args.k:
                    print "\nTIP: number of topics can be specified with argument '-k N N N ...':"
                    print "     vsm train %s -k %s\n" %\
                        (args.config_file, ' '.join(map(str, args.k)))
            except ValueError:
                print "Enter valid integers, separated by spaces!"

    if args.processes < 0:
        import multiprocessing
        args.processes = multiprocessing.cpu_count() + args.processes

    print "Loading corpus... "
    corpus = Corpus.load(corpus_filename)

    try:
        model_pattern = config.get("main", "model_pattern")
    except NoOptionError:
        model_pattern = None

    if model_pattern is not None and \
            bool_prompt("Existing models found. Continue training?", default=True):
        from vsm.model.lda import LDA
        m = LDA.load(model_pattern.format(args.k[0]),
                     multiprocessing=args.processes > 1,
                     n_proc=args.processes)

        if args.iter is None:
            args.iter = int_prompt("Total number of training iterations:",
                                   default=int(m.iteration * 1.5), min=m.iteration)
            print "\nTIP: number of training iterations can be specified with argument '--iter N':"
            print "     vsm train --iter %d %s\n" % (args.iter, args.config_file)
        del m

        # if the set changes, build some new models and continue some old ones
        config_topics = eval(config.get("main", "topics"))
        if args.k != config_topics:
            new_models = set(args.k) - set(config_topics)
            continuing_models = set(args.k) & set(config_topics)
            build_models(corpus, corpus_filename, model_path,
                         config.get("main", "context_type"),
                         new_models, n_iterations=args.iter,
                         n_proc=args.processes, seed=args.seed)
            model_pattern = continue_training(model_pattern, continuing_models,
                                              args.iter, n_proc=args.processes)
        else:
            model_pattern = continue_training(model_pattern, args.k, args.iter,
                                              n_proc=args.processes)
    else:
        # build a new model
        if args.iter is None:
            args.iter = int_prompt("Number of training iterations:", default=200)
            print "\nTIP: number of training iterations can be specified with argument '--iter N':"
            print "     vsm train --iter %d %s\n" % (args.iter, args.config_file)

        ctxs = corpus.context_types
        ctxs = sorted(ctxs, key=lambda ctx: len(corpus.view_contexts(ctx)))
        if args.context_type not in ctxs:
            while args.context_type not in ctxs:
                contexts = ctxs[:]
                contexts[0] = contexts[0].upper()
                contexts = '/'.join(contexts)
                args.context_type = raw_input("Select a context type [%s] : " % contexts)
                if args.context_type.strip() == '':
                    args.context_type = ctxs[0]
                if args.context_type == ctxs[0].upper():
                    args.context_type = ctxs[0]

            print "\nTIP: context type can be specified with argument '--context-type TYPE':"
            print "     vsm train --context-type %s %s\n" % (args.context_type, args.config_file)

        print "\nTIP: This configuration can be automated as:"
        print "     vsm train %s --iter %d --context-type %s -k %s\n" %\
            (args.config_file, args.iter, args.context_type,
             ' '.join(map(str, args.k)))
        model_pattern = build_models(corpus, corpus_filename, model_path,
                                     args.context_type, args.k,
                                     n_iterations=args.iter,
                                     n_proc=args.processes, seed=args.seed,
                                     dry_run=args.dry_run)

    config.set("main", "model_pattern", model_pattern)
    if args.context_type:
        # test for presence, since continuing doesn't require context_type
        config.set("main", "context_type", args.context_type)
    args.k.sort()
    config.set("main", "topics", str(args.k))
    if not args.dry_run:
        with open(args.config_file, "wb") as configfh:
            config.write(configfh)
response.generic_patterns = ['*'] if request.is_local else []

## (optional) optimize handling of static files
# response.optimize_css = 'concat,minify,inline'
# response.optimize_js = 'concat,minify,inline'

#########################################################################
## Here is sample code if you need for
## - email capabilities
## - authentication (registration, login, logout, ... )
## - authorization (role based authorization)
## - services (xml, csv, json, xmlrpc, jsonrpc, amf, rss)
## - old style crud actions
## (more options discussed in gluon/tools.py)
#########################################################################

SECURE_SESSIONS_WITH_HTTPS = conf.getboolean("security", "secure_sessions_with_HTTPS")
# This is set to 'true' during deployment if our wildcard cert file is found. We
# assume this means all prerequisites for HTTPS/SSL are complete.

from gluon.tools import Auth, Crud, Service, PluginManager, prettydate
auth = Auth(db, secure=SECURE_SESSIONS_WITH_HTTPS)
crud, service, plugins = Crud(db), Service(), PluginManager()

## configure email
mail = auth.settings.mailer
mail.settings.server = 'logging' or 'smtp.gmail.com:587'
mail.settings.sender = '*****@*****.**'
mail.settings.login = '******'

## configure auth policy
auth.settings.registration_requires_verification = False
ACCOUNT_ACTIVATION_DAYS = 2
PASSWD_MIN_LENGTH = 6
PASSWD_FORCE_SPECIAL_CHARS = False
PASSWD_FORCE_DIGITS = False
PASSWD_FORCE_LETTERS = False
BASIC_AUTH_REQUIRED_ON_LOCALHOST = False

config = SafeConfigParser()
if not config.read(CONFIG_FILES):
    raise RuntimeError('Could not find ECM configuration. Looked in %s.' % CONFIG_FILES)

EXTERNAL_HOST_NAME = config.get('misc', 'external_host_name')
USE_HTTPS = config.getboolean('misc', 'use_https')
EVEAPI_STUB_ENABLED = config.getboolean('misc', 'eveapi_stub_enabled')
EVEAPI_STUB_FILES_ROOT = config.get('misc', 'eveapi_stub_files_root')

###############################################################################

###################
# DJANGO SETTINGS #
###################
DEBUG = config.getboolean('misc', 'debug')

def get_db_config():
    engine = config.get('database', 'ecm_engine')
    if engine == 'django.db.backends.sqlite3':
        folder = config.get('database', 'sqlite_db_dir') or rel_path('db/')
def __init__(self, config_path):
    '''
    hierarchy: all_output_prefix / beyond..
    In Train: train result prefix / model_config_name
    In Dev: Dev_Running_Prefix
    In Test: Test_Running_Prefix
    '''
    parser = SafeConfigParser()
    sys.stderr.write("Use parser: {}\n".format(config_path))
    parser.read(config_path)

    self.machine = parser.get('Constant', 'Machine')
    if self.machine == 'galaxy':
        torch.backends.cudnn.enabled = False
        self.resource_prefix = '/public/ComplexWebQuestions_Resources'
    elif self.machine == 'gelato':
        self.resource_prefix = '/z/zxycarol/ComplexWebQuestions_Resources'
    else:
        raise OSError('No such machine exists')
    self.cache_Dir = os.path.join(self.resource_prefix, 'cache')
    sys.stderr.write("Use Machine {}\n".format(self.machine))
    self.vocab_prefix = os.path.join(self.resource_prefix,
                                     parser.get('Constant', 'Vocab_Prefix'))

    # runtime section
    self.use_cache = parser.getboolean('Runtime', 'Use_Cache')
    self.device = parser.getint('Runtime', 'Device')
    torch.cuda.set_device(self.device)
    self.do_train = parser.getboolean('Runtime', 'Train')
    self.do_dev = parser.getboolean('Runtime', 'Dev')
    self.do_test = parser.getboolean('Runtime', 'Test')
    sys.stderr.write('Train: {}, Dev: {}, Test: {}\n'.format(
        self.do_train, self.do_dev, self.do_test))

    # train section
    self.train_batch_size = parser.getint('Train', 'Batch_Size')
    self.max_epoch = parser.getint('Train', 'Max_Epoch')
    self.reward_threshold = parser.getfloat('Train', 'Reward_Threshold')
    self.optimizer = parser.get('Train', 'Opitimizer').lower()  # option name is misspelled in the config schema
    self.learning_rate = parser.getfloat('Train', 'Learning_Rate')
    self.lr_gamma = parser.getfloat('Train', 'LR_Gamma')
    self.dropout = parser.getfloat('Train', 'Dropout')
    self.use_constraint = parser.getboolean('Train', 'Use_Constraint')
    self.use_attn = parser.getboolean('Train', 'Use_Attn')
    self.attn_dim = parser.getint('Train', 'Attn_Dim')
    self.always_pooling = parser.get('Train', 'Always_Pooling')
    self.pooling_threshold = parser.get('Train', 'Pooling_Threshold')
    self.use_entity_type = parser.getboolean('Train', 'Entity_Type')
    self.train_dataset_path = self.resource_prefix + parser.get('Train', 'Train_Dataset_Path')
    self.train_result_prefix = self.resource_prefix + parser.get('Train', 'Train_Result_Prefix')

    self.el_score = False
    self.prior_weights = False
    self.train_cache_prefix = ''
    try:
        self.el_score = parser.getboolean('Train', 'EL_Score')
    except Exception:
        pass
    try:
        self.prior_weights = parser.getboolean('Train', 'Prior_Weights')
    except Exception:
        pass
    try:
        self.train_cache_prefix = parser.get('Train', 'Train_Cache_Prefix')
    except Exception:
        pass
    sys.stderr.write("Device: {}, Attn: {}, Entity Type: {}, EL Score: {}, "
                     "Prior Weights: {}, Dropout: {}, LR: {}, LR_Gamma: {}\n".format(
                         self.device, self.use_attn, self.use_entity_type,
                         self.el_score, self.prior_weights, self.dropout,
                         self.learning_rate, self.lr_gamma))

    # dev section
    self.dev_running_prefix = self.resource_prefix + parser.get('Dev', 'Dev_Running_Prefix')
    self.dev_dataset_path = self.resource_prefix + parser.get('Dev', 'Dev_Dataset_Path')
    self.dev_src_path = self.resource_prefix + parser.get('Dev', 'Dev_Src_Path')
    self.dev_sub1_cands_dir = self.resource_prefix + parser.get('Dev', 'Sub1_Cands_Dir')
    self.dev_sub2_cands_dir = self.resource_prefix + parser.get('Dev', 'Sub2_Cands_Dir')

    # test section
    self.test_dataset_path = self.resource_prefix + parser.get('Test', 'Test_Dataset_Path')
    self.test_running_prefix = self.resource_prefix + parser.get('Test', 'Test_Running_Prefix')
    self.test_src_path = self.resource_prefix + parser.get('Test', 'Test_Src_Path')
    self.test_sub1_cands_dir = self.resource_prefix + parser.get('Test', 'Sub1_Cands_Dir')
    self.test_sub2_cands_dir = self.resource_prefix + parser.get('Test', 'Sub2_Cands_Dir')
    self.test_model_prefix = None
    self.test_rough_estimate = False
    try:
        self.test_model_prefix = parser.get('Test', 'Model_Prefix')
    except Exception:
        sys.stderr.write('No model provided\n')
    try:
        self.test_rough_estimate = parser.getboolean('Test', 'Rough_Estimate')
    except Exception:
        pass

    self.model_conf_name = "CONS{}_OPT{}_LR{}_GA{}_ATTN{}{}_DO{}_PR{}".format(
        self.use_constraint, self.optimizer, self.learning_rate, self.lr_gamma,
        self.use_attn, self.attn_dim, self.dropout, self.prior_weights)

    # train model prefix
    if self.do_train:
        if not os.path.exists(self.train_result_prefix):
            if subprocess.call('mkdir ' + self.train_result_prefix, shell=True):
                raise OSError('cannot mkdir ' + self.train_result_prefix)
            else:
                sys.stderr.write('Successful mkdir {}\n'.format(self.train_result_prefix))
        if not os.path.exists(os.path.join(self.train_result_prefix, self.model_conf_name)):
            if subprocess.call('mkdir ' + os.path.join(self.train_result_prefix, self.model_conf_name),
                               shell=True):
                raise OSError('cannot mkdir ' +
                              os.path.join(self.train_result_prefix, self.model_conf_name))
            else:
                sys.stderr.write('Successful mkdir {}\n'.format(
                    os.path.join(self.train_result_prefix, self.model_conf_name)))

    # dev and test running
    if self.do_dev:
        if not os.path.exists(self.dev_running_prefix):
            if subprocess.call('mkdir ' + self.dev_running_prefix, shell=True):
                raise OSError('cannot mkdir ' + self.dev_running_prefix)
            else:
                sys.stderr.write('Successful mkdir ' + self.dev_running_prefix)
        if not os.path.exists(os.path.join(self.dev_running_prefix, self.model_conf_name)):
            if subprocess.call('mkdir ' + os.path.join(self.dev_running_prefix, self.model_conf_name),
                               shell=True):
                raise OSError('cannot mkdir ' +
                              os.path.join(self.dev_running_prefix, self.model_conf_name))
            else:
                sys.stderr.write('Successful mkdir {}\n'.format(
                    os.path.join(self.dev_running_prefix, self.model_conf_name)))

    if self.do_test:
        if not os.path.exists(self.test_running_prefix):
            if subprocess.call('mkdir ' + self.test_running_prefix, shell=True):
                raise OSError('cannot mkdir ' + self.test_running_prefix)
            else:
                sys.stderr.write('Successful mkdir {}\n'.format(self.test_running_prefix))
        else:
            sys.stderr.write("{} Exists\n".format(self.test_running_prefix))
        if not os.path.exists(os.path.join(self.test_running_prefix, self.model_conf_name)):
            if subprocess.call('mkdir ' + os.path.join(self.test_running_prefix, self.model_conf_name),
                               shell=True):
                raise OSError('cannot mkdir ' +
                              os.path.join(self.test_running_prefix, self.model_conf_name))
            else:
                sys.stderr.write('Successful mkdir {}\n'.format(
                    os.path.join(self.test_running_prefix, self.model_conf_name)))
        else:
            sys.stderr.write("{} Exists\n".format(
                os.path.join(self.test_running_prefix, self.model_conf_name)))

    # load resources
    self.q_word_to_idx = cPickle.load(open(
        os.path.join(self.vocab_prefix, 'question_word_to_idx_2.pickle'), 'rb'))
    self.q_dep_to_idx = cPickle.load(open(
        os.path.join(self.vocab_prefix, 'question_dep_to_idx_2.pickle'), 'rb'))
    self.q_word_emb = cPickle.load(open(
        os.path.join(self.vocab_prefix, 'question_word_emb_tensor_2'), 'rb'))
    self.q_dep_emb = cPickle.load(open(
        os.path.join(self.vocab_prefix, 'question_dep_emb_tensor_2'), 'rb'))
    self.rel_word_to_idx = cPickle.load(open(
        os.path.join(self.vocab_prefix, 'rel_word_to_idx.pickle'), 'rb'))
    self.rel_id_to_idx = cPickle.load(open(
        os.path.join(self.vocab_prefix, 'rel_id_to_idx_word.pickle'), 'rb'))
    self.rel_word_emb = cPickle.load(open(
        os.path.join(self.vocab_prefix, 'rel_word_emb_word_tensor'), 'rb'))
    self.rel_id_emb = cPickle.load(open(
        os.path.join(self.vocab_prefix, 'rel_id_emb_word_tensor'), 'rb'))

    self.constraint_id_to_idx = None
    self.constraint_word_to_idx = None
    self.constraint_word_emb = None
    self.constraint_id_emb = None
    if self.use_constraint:
        self.constraint_id_to_idx = cPickle.load(open(
            os.path.join(self.vocab_prefix, 'constraint_id_to_idx.pickle'), 'rb'))
        self.constraint_word_to_idx = cPickle.load(open(
            os.path.join(self.vocab_prefix, 'constraint_word_to_idx.pickle'), 'rb'))
        self.constraint_word_emb = cPickle.load(open(
            os.path.join(self.vocab_prefix, 'constraint_word_emb_tensor'), 'rb'))
        self.constraint_id_emb = cPickle.load(open(
            os.path.join(self.vocab_prefix, 'constraint_id_emb_tensor'), 'rb'))

    if self.do_train:
        self.train()
    if self.do_dev:
        self.dev()
    if self.do_test:
        self.test()
    set_auto_renew(1)
    raise SystemExit


def stop_web_server():
    try:
        print "Stopping WebServer"
        server.shutdown()
    except Exception as ex:
        print 'Failed to stop WebServer: ' + str(ex)


print 'Welcome to Poloniex Lending Bot'

if config_needed:
    # Configure webserver
    web_server_enabled = config.has_option('BOT', 'startWebServer') \
        and config.getboolean('BOT', 'startWebServer')
    if config.has_option('BOT', 'customWebServerAddress'):
        custom_web_server_address = config.get('BOT', 'customWebServerAddress').split(':')
        if len(custom_web_server_address) == 1:
            custom_web_server_address.append("8000")
            print "WARNING: Please specify a port for the webserver in the form IP:PORT, default port 8000 used."
    else:
        custom_web_server_address = ['0.0.0.0', '8000']
else:
    web_server_enabled = args.startwebserver
    if args.customwebserveraddress:
        custom_web_server_address = args.customwebserveraddress
    else:
        custom_web_server_address = ['0.0.0.0', '8000']

web_server_ip = custom_web_server_address[0]
def run(ini_file='plot_Qsim_Qobs_Rain.ini'):
    config = SafeConfigParser()
    config.read(ini_file)
    print 'Read the file ', ini_file

    file_Qsim = config.get('files', 'file_Qsim')
    file_Qobs = config.get('files', 'file_Qobs')
    file_rain = config.get('files', 'file_rain')
    image_out = config.get('files', 'image_out')

    group_name = config.get('groups', 'group_name')

    Qobs = config.getboolean('flags', 'Qobs')
    Pobs = config.getboolean('flags', 'Pobs')
    nash = config.getboolean('flags', 'nash')

    tab_col = ['k', 'r']
    tab_style = ['-', '-']
    tab_width = ['1', '1']
    color_P = 'b'
    transparency_P = 0.5  # (0 for invisible)

    # create path_out if it doesn't exist
    ut.check_file_exist(image_out)

    # Read the observations
    # Qobs
    ar_date, ar_Qobs = read_observed_flow(file_Qobs)
    delta = date2num(ar_date[1]) - date2num(ar_date[0])

    # Rain
    if Pobs:
        h5file_in = h5.openFile(file_rain, mode='r')
        group = '/' + group_name + '/'
        node = h5file_in.getNode(group + 'rainfall')
        ndar_rain = node.read()
        h5file_in.close()
        # Compute the mean catchment rainfall
        ar_rain = np.average(ndar_rain, axis=1)

    # Read the simulated data Q
    file_h5 = file_Qsim
    ndar_Qc_out = ut.read_one_array_hdf(file_h5, '/Channel/', 'Qc_out')
    ar_Qsim = ndar_Qc_out[1:, 0]

    ## Graph
    fig, ax = plt.subplots()
    lines = []
    tab_leg = []
    if Qobs:
        lines += ax.plot(ar_date, ar_Qobs,
                         color=tab_col[-1],
                         linestyle=tab_style[-1],
                         linewidth=tab_width[-1])
        tab_leg.append('Observation')
        tab_leg = tab_leg[::-1]

    lines += ax.plot(ar_date, ar_Qsim,
                     color=tab_col[0],
                     linestyle=tab_style[0],
                     linewidth=tab_width[0])
    tab_leg.append('Model')

    if nash:
        nash_value = ut.Nash(ar_Qsim, ar_Qobs)
        lines += ax.plot(ar_date[0:1], ar_Qsim[0:1], 'w:')
        tab_leg.append('Eff = ' + str(nash_value)[0:5])

    ax.set_xlim(ar_date[0], ar_date[-1])
    ytitle = r'$Q \ (m^3/s)$'
    ax.set_ylabel(ytitle, fontsize=18)
    ax.set_title(group_name)

    ax2 = ax.twinx()
    ax2.set_ylabel(r'$Rainfall \ (mm)$', fontsize=18, color=color_P)
    ax2.bar(ar_date, ar_rain, width=delta,
            facecolor='blue', edgecolor='blue', alpha=transparency_P)
    ax2.set_ylim(max(ar_rain) * 2, min(ar_rain))
    ax2.legend(lines, tab_leg, loc='upper right', fancybox=True)
    leg = ax2.get_legend()
    leg.get_frame().set_alpha(0.75)

    # rotate and align the tick labels so they look better;
    # unfortunately autofmt_xdate doesn't work with twinx due to a bug
    # in matplotlib <= 1.0.0, so we do it manually
    ## fig.autofmt_xdate()
    bottom = 0.2
    rotation = 30
    ha = 'right'
    for ax in fig.get_axes():
        if hasattr(ax, 'is_last_row') and ax.is_last_row():
            for label in ax.get_xticklabels():
                label.set_ha(ha)
                label.set_rotation(rotation)
        else:
            for label in ax.get_xticklabels():
                label.set_visible(False)
            ax.set_xlabel('')
    fig.subplots_adjust(bottom=bottom)

    fig.savefig(image_out)
    plt.show()
class parameters(object): def __init__(self, parfile='default.par'): #config file parser if parfile: self.parser = SafeConfigParser() try: self.parser.readfp(open(parfile, 'rb')) # python 2 except: self.parser.read_file(open(parfile, 'rt', encoding='latin1')) # python 3 self.parfile = parfile self.default_parser = SafeConfigParser() self.default_parser.sections() # section General self.gen_model = self.getpar('General', 'model') self.gen_pretrain = self.getpar('General', 'do_pretrain', 'bool') self.gen_seed = self.getpar('General', 'rnd_seed', 'int') self.gen_momentum = self.getpar('General', 'momentum', 'float') self.gen_verbose = self.getpar('General', 'verbose', 'int') self.in_data = self.getpar('Input', 'data') self.in_restore_model = self.getpar('Input', 'restore_previous_model', 'bool') self.in_prev_model = self.getpar('Input', 'previous_model') self.out_save_predict = self.getpar('Output', 'save_predictions', 'bool') self.out_save_layers = self.getpar('Output', 'save_layers', 'bool') self.out_dir = self.getpar('Output', 'out_dir') self.rbm_layers = self.getpar('RBM', 'layers', 'list-int') self.rbm_gauss_vis = self.getpar('RBM', 'gauss_visible', 'bool') self.rbm_stddev = self.getpar('RBM', 'stddev', 'float') self.rbm_learn_rate = self.getpar('RBM', 'learning_rate', 'float') self.rbm_epochs = self.getpar('RBM', 'epochs', 'int') self.rbm_batch_size = self.getpar('RBM', 'batch_size', 'int') self.rbm_gibbs_k = self.getpar('RBM', 'gibbs_k', 'int') self.fine_act_func = self.getpar('Finetuning', 'act_func') self.fine_learn_rate = self.getpar('Finetuning', 'learning_rate', 'float') self.fine_momentum = self.getpar('Finetuning', 'momentum', 'float') self.fine_epochs = self.getpar('Finetuning', 'epochs', 'int') self.fine_batch_size = self.getpar('Finetuning', 'batch_size', 'int') self.fine_minimiser = self.getpar('Finetuning', 'minimiser') self.fine_loss_func = self.getpar('Finetuning', 'loss_func') self.fine_dropout = self.getpar('Finetuning', 'dropout', 'float') def getpar(self, sec, par, type=None): # get parameter from user defined parser. If parameter is not found there, load the default parameter # the default parameter file parser is self.default_parser, defined in init try: if type == None: try: return self.parser.get(sec, par) except: return self.default_parser.get(sec, par) elif type == 'float': try: return self.parser.getfloat(sec, par) except: return self.default_parser.getfloat(sec, par) elif type == 'bool': try: return self.parser.getboolean(sec, par) except: return self.default_parser.getboolean(sec, par) elif type == 'int': try: return self.parser.getint(sec, par) except: return self.default_parser.getint(sec, par) elif type == 'list-str': try: l = self.parser.get(sec, par).split(',') return [str(m).strip() for m in l] except: l = self.default_parser.get(sec, par).split(',') return [str(m).strip() for m in l] elif type == 'list-float': try: l = self.parser.get(sec, par).split(',') return [float(m) for m in l] except: l = self.default_parser.get(sec, par).split(',') return [float(m) for m in l] elif type == 'list-int': try: l = self.parser.get(sec, par).split(',') return [int(m) for m in l] except: l = self.default_parser.get(sec, par).split(',') return [int(m) for m in l] else: logging.error( 'Cannot set parameter %s in section %s. Parameter type %s not recognized. Set to None' ( par, sec, type)) return None except: logging.error( 'Cannot set parameter %s in section %s. 
Set to None' % (par, sec)) return None def params_to_dict(self): # convert param variables to dictionary pr = {} for name in dir(self): value = getattr(self, name) if not name.startswith('__') and not inspect.ismethod(value) and \ name != 'parser' and name != 'default_parser' and name != 'console': pr[name] = value return pr
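# A standalone sketch of the fallback pattern getpar() relies on: try the
# user's parser first, then fall back to the defaults parser. The file names
# in the usage note are hypothetical.
from ConfigParser import SafeConfigParser, NoOptionError, NoSectionError

def get_with_default(user_parser, default_parser, sec, par):
    # Prefer the user-supplied value; fall back to the shipped default.
    try:
        return user_parser.get(sec, par)
    except (NoOptionError, NoSectionError):
        return default_parser.get(sec, par)

# usage sketch:
#   user = SafeConfigParser(); user.read('experiment.par')
#   defaults = SafeConfigParser(); defaults.read('default.par')
#   model = get_with_default(user, defaults, 'General', 'model')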
def ReadConfig(): """ Read the config file and set all the variables. """ # Read config file cfg = SafeConfigParser() cfg.optionxform = lambda option: option try: with codecs.open(autosub.CONFIGFILE, 'r', autosub.SYSENCODING) as f: cfg.readfp(f) except: #No config found so we create a default config Message = WriteConfig() return # First we check whether the config has been upgraded if autosub.CONFIGVERSION < version.configversion: upgradeConfig(cfg, autosub.CONFIGVERSION, version.configversion) elif autosub.CONFIGVERSION > version.configversion: print "Config: ERROR! Config version higher than this version of AutoSub supports. Update AutoSub!" os._exit(1) section = 'config' if not cfg.has_section(section): cfg.add_section(section) if cfg.has_option(section, "configversion"): autosub.CONFIGVERSION = cfg.getint("config", "configversion") if cfg.has_option(section, "wantedfirst"): autosub.WANTEDFIRST = cfg.getboolean(section, "wantedfirst") if cfg.has_option(section, 'downloaddutch'): autosub.DOWNLOADDUTCH = cfg.getboolean(section, 'downloaddutch') if cfg.has_option(section, 'downloadeng'): autosub.DOWNLOADENG = cfg.getboolean(section, 'downloadeng') if cfg.has_option(section, "fallbacktoeng"): autosub.FALLBACKTOENG = cfg.getboolean(section, "fallbacktoeng") if cfg.has_option(section, "notifyen"): autosub.NOTIFYEN = cfg.getboolean(section, "notifyen") if cfg.has_option(section, "notifynl"): autosub.NOTIFYNL = cfg.getboolean(section, "notifynl") if cfg.has_option(section, "launchbrowser"): autosub.LAUNCHBROWSER = cfg.getboolean(section, "launchbrowser") if cfg.has_option(section, "skiphiddendirs"): autosub.SKIPHIDDENDIRS = cfg.getboolean(section, "skiphiddendirs") if cfg.has_option(section, "englishsubdelete"): autosub.ENGLISHSUBDELETE = cfg.getboolean(section, "englishsubdelete") if cfg.has_option(section, "podnapisi"): autosub.PODNAPISI = cfg.getboolean(section, "podnapisi") if cfg.has_option(section, "subscene"): autosub.SUBSCENE = cfg.getboolean(section, "subscene") if cfg.has_option(section, "addic7ed"): autosub.ADDIC7ED = cfg.getboolean(section, "addic7ed") if cfg.has_option(section, "opensubtitles"): autosub.OPENSUBTITLES = cfg.getboolean(section, "opensubtitles") if cfg.has_option(section, "hearingimpaired"): autosub.HI = cfg.getboolean(section, "hearingimpaired") if cfg.has_option(section, 'minmatchscore'): autosub.MINMATCHSCORE = cfg.getint(section, 'minmatchscore') if cfg.has_option(section, 'searchinterval'): autosub.SEARCHINTERVAL = cfg.getint(section, 'searchinterval') if cfg.has_option(section, "browserrefresh"): autosub.BROWSERREFRESH = cfg.getint(section, "browserrefresh") if cfg.has_option(section, "path"): autosub.PATH = os.path.normpath(cfg.get(section, "path")) if cfg.has_option(section, "rootpath"): autosub.SERIESPATH = os.path.normpath(cfg.get(section, "rootpath")) if cfg.has_option(section, "seriespath"): autosub.SERIESPATH = os.path.normpath(cfg.get(section, "seriespath")) if autosub.SERIESPATH == '.': autosub.SERIESPATH = u'' if cfg.has_option(section, "bckpath"): autosub.BCKPATH = os.path.normpath(cfg.get(section, "bckpath")) if autosub.BCKPATH == '.': autosub.BCKPATH = u'' if cfg.has_option(section, "subeng"): autosub.SUBENG = cfg.get(section, "subeng") if cfg.has_option(section, "subnl"): autosub.SUBNL = cfg.get(section, "subnl") if cfg.has_option(section, "postprocesscmd"): autosub.POSTPROCESSCMD = cfg.get(section, "postprocesscmd") if cfg.has_option(section, "opensubtitlesuser"): autosub.OPENSUBTITLESUSER = cfg.get(section, "opensubtitlesuser") if 
cfg.has_option(section, "opensubtitlespasswd"): autosub.OPENSUBTITLESPASSWD = cfg.get(section, "opensubtitlespasswd") if cfg.has_option(section, "addic7eduser"): autosub.ADDIC7EDUSER = cfg.get(section, "addic7eduser") if cfg.has_option(section, "addic7edpasswd"): autosub.ADDIC7EDPASSWD = cfg.get(section, "addic7edpasswd") if cfg.has_option(section, "logfile"): autosub.LOGFILE = cfg.get(section, "logfile") if cfg.has_option(section, "subcodec"): autosub.SUBCODEC = cfg.get(section, "subcodec") if cfg.has_option(section, "skipstringnl"): autosub.SKIPSTRINGNL = cfg.get(section, "skipstringnl") if cfg.has_option(section, "skipstringen"): autosub.SKIPSTRINGEN = cfg.get(section, "skipstringen") if cfg.has_option(section, "skipfoldersnl"): autosub.SKIPFOLDERSNL = cfg.get(section, "skipfoldersnl") if cfg.has_option(section, "skipfoldersen"): autosub.SKIPFOLDERSEN = cfg.get(section, "skipfoldersen") # ******************* # * Logfile Section * # ******************* section = 'logfile' if not cfg.has_section(section): cfg.add_section(section) if cfg.has_option(section, "logfile"): autosub.LOGFILE = cfg.get(section, "logfile") if cfg.has_option(section, "loglevel"): LogLevel = cfg.get(section, "loglevel").upper() if LogLevel == u'ERROR': autosub.LOGLEVEL = logging.ERROR elif LogLevel == u"WARNING": autosub.LOGLEVEL = logging.WARNING elif LogLevel == u"DEBUG": autosub.LOGLEVEL = logging.DEBUG elif LogLevel == u"INFO": autosub.LOGLEVEL = logging.INFO elif LogLevel == u"CRITICAL": autosub.LOGLEVEL = logging.CRITICAL if cfg.has_option(section, "loglevelconsole"): LogLevel = cfg.get(section, "loglevelconsole").upper() if LogLevel == u'ERROR': autosub.LOGLEVELCONSOLE = logging.ERROR elif LogLevel == u"WARNING": autosub.LOGLEVELCONSOLE = logging.WARNING elif LogLevel == u"DEBUG": autosub.LOGLEVELCONSOLE = logging.DEBUG elif LogLevel == u"INFO": autosub.LOGLEVELCONSOLE = logging.INFO elif LogLevel == u"CRITICAL": autosub.LOGLEVELCONSOLE = logging.CRITICAL if cfg.has_option(section, "logsize"): autosub.LOGSIZE = cfg.getint(section, "logsize") if cfg.has_option(section, "lognum"): autosub.LOGNUM = cfg.getint(section, "lognum") # ****************************** # * Cherrypy Webserver Section * # ****************************** section = 'webserver' if not cfg.has_section(section): cfg.add_section(section) if cfg.has_option(section, 'webserverip'): autosub.WEBSERVERIP = str(cfg.get(section, 'webserverip')) if cfg.has_option(section, 'webserverport'): autosub.WEBSERVERPORT = int(cfg.get(section, 'webserverport')) if cfg.has_option(section, 'webroot'): autosub.WEBROOT = str(cfg.get(section, 'webroot')) if cfg.has_option(section, 'username'): autosub.USERNAME = str(cfg.get(section, 'username')) if cfg.has_option(section, 'password'): autosub.PASSWORD = str(cfg.get(section, 'password')) # ******************** # * SkipShow Section * # ******************** section = 'skipshow' if not cfg.has_section(section): cfg.add_section(section) autosub.SKIPSHOWUPPER = {} autosub.SKIPSHOW = {} SkipShows = dict(cfg.items(section)) #autosub.SKIPSHOW = dict(cfg.items('skipshow')) # The following 5 lines convert the skipshow to uppercase. 
And also convert the variables to a list # also replace the "~" with ":" necessary because the config parser sees ":" as a delimiter # The UPPER version is for searching, the normal for display in the user interface for show in SkipShows: if re.match("^[0-9 ,.-]+$", SkipShows[show]): autosub.SKIPSHOW[show.replace('~', ':')] = SkipShows[show] autosub.SKIPSHOWUPPER[show.upper().replace('~', ':')] = [ Item.strip() for Item in SkipShows[show].split(',') ] # ******************************** # * Addic7ed Namemapping Section * # ******************************** section = 'addic7edmapping' if not cfg.has_section(section): cfg.add_section(section) autosub.USERADDIC7EDMAPPING = {} try: autosub.USERADDIC7EDMAPPING = dict(cfg.items(section)) except: pass # iterate over a copy of the keys so entries can be deleted, call isdigit() (not the bare method object), and print before deleting for ImdbId in list(autosub.USERADDIC7EDMAPPING.keys()): if not (ImdbId.isdigit() and autosub.USERADDIC7EDMAPPING[ImdbId].isdigit()): print 'ReadConfig: Addic7ed mapping has an unknown format.', ImdbId, ' = ', autosub.USERADDIC7EDMAPPING[ ImdbId] del autosub.USERADDIC7EDMAPPING[ImdbId] # Settings # **************************** # * User Namemapping Section * # **************************** section = 'namemapping' if not cfg.has_section(section): cfg.add_section(section) NameMapping = dict(cfg.items(section)) autosub.USERNAMEMAPPING = {} for ConfigName in NameMapping.iterkeys(): if NameMapping[ConfigName].isdigit(): Name = ConfigName.replace('~', ':') if not Name.upper() in autosub.NAMEMAPPING.keys(): autosub.NAMEMAPPING[Name.upper()] = [ NameMapping[ConfigName].strip(), u'' ] autosub.USERNAMEMAPPING[Name] = NameMapping[ConfigName].strip() else: print 'ReadConfig: Username mapping has an unknown format.', ConfigName, ' = ', NameMapping[ ConfigName] # ****************** # * Notify Section * # ****************** section = 'notify' if not cfg.has_section(section): cfg.add_section(section) if cfg.has_option(section, 'notifymail'): autosub.NOTIFYMAIL = cfg.getboolean(section, 'notifymail') if cfg.has_option(section, 'mailsrv'): autosub.MAILSRV = cfg.get(section, 'mailsrv') if cfg.has_option(section, 'mailfromaddr'): autosub.MAILFROMADDR = cfg.get(section, 'mailfromaddr') if cfg.has_option(section, 'mailtoaddr'): autosub.MAILTOADDR = cfg.get(section, 'mailtoaddr') if cfg.has_option(section, 'mailusername'): autosub.MAILUSERNAME = cfg.get(section, 'mailusername') if cfg.has_option(section, 'mailpassword'): autosub.MAILPASSWORD = cfg.get(section, 'mailpassword') if cfg.has_option(section, 'mailsubject'): autosub.MAILSUBJECT = cfg.get(section, 'mailsubject') if cfg.has_option(section, 'mailencryption'): autosub.MAILENCRYPTION = cfg.get(section, 'mailencryption') if cfg.has_option(section, 'mailauth'): autosub.MAILAUTH = cfg.get(section, 'mailauth') if cfg.has_option(section, 'notifygrowl'): autosub.NOTIFYGROWL = cfg.getboolean(section, 'notifygrowl') if cfg.has_option(section, 'growlhost'): autosub.GROWLHOST = cfg.get(section, 'growlhost') if cfg.has_option(section, 'growlport'): autosub.GROWLPORT = cfg.get(section, 'growlport') if cfg.has_option(section, 'growlpass'): autosub.GROWLPASS = cfg.get(section, 'growlpass') if cfg.has_option(section, 'notifytwitter'): autosub.NOTIFYTWITTER = cfg.getboolean(section, 'notifytwitter') if cfg.has_option(section, 'twitterkey'): autosub.TWITTERKEY = cfg.get(section, 'twitterkey') if cfg.has_option(section, 'twittersecret'): autosub.TWITTERSECRET = cfg.get(section, 'twittersecret') if cfg.has_option(section, 'notifynma'): autosub.NOTIFYNMA = cfg.getboolean(section, 'notifynma') if cfg.has_option(section, 'nmaapi'): autosub.NMAAPI = 
cfg.get(section, 'nmaapi') if cfg.has_option(section, 'nmapriority'): autosub.NMAPRIORITY = cfg.getint(section, 'nmapriority') if cfg.has_option(section, 'notifyprowl'): autosub.NOTIFYPROWL = cfg.getboolean(section, 'notifyprowl') if cfg.has_option(section, 'prowlapi'): autosub.PROWLAPI = cfg.get(section, 'prowlapi') if cfg.has_option(section, 'prowlpriority'): autosub.PROWLPRIORITY = cfg.getint(section, 'prowlpriority') if cfg.has_option(section, 'notifytelegram'): autosub.NOTIFYTELEGRAM = cfg.getboolean(section, 'notifytelegram') if cfg.has_option(section, 'telegramapi'): autosub.TELEGRAMAPI = cfg.get(section, 'telegramapi') if cfg.has_option(section, 'telegramid'): autosub.TELEGRAMID = cfg.get(section, 'telegramid') if cfg.has_option(section, 'notifypushalot'): autosub.NOTIFYPUSHALOT = cfg.getboolean(section, 'notifypushalot') if cfg.has_option(section, 'pushalotapi'): autosub.PUSHALOTAPI = cfg.get(section, 'pushalotapi') if cfg.has_option(section, 'notifypushbullet'): autosub.NOTIFYPUSHBULLET = cfg.getboolean(section, 'notifypushbullet') if cfg.has_option(section, 'pushbulletapi'): autosub.PUSHBULLETAPI = cfg.get(section, 'pushbulletapi') if cfg.has_option(section, 'notifypushover'): autosub.NOTIFYPUSHOVER = cfg.getboolean(section, 'notifypushover') if cfg.has_option(section, 'pushoverappkey'): autosub.PUSHOVERAPPKEY = cfg.get(section, 'pushoverappkey') if cfg.has_option(section, 'pushoveruserkey'): autosub.PUSHOVERUSERKEY = cfg.get(section, 'pushoveruserkey') if cfg.has_option(section, 'pushoverpriority'): autosub.PUSHOVERPRIORITY = cfg.getint(section, 'pushoverpriority') if cfg.has_option(section, 'notifyboxcar2'): autosub.NOTIFYBOXCAR2 = cfg.getboolean(section, 'notifyboxcar2') if cfg.has_option(section, 'boxcar2token'): autosub.BOXCAR2TOKEN = cfg.get(section, 'boxcar2token') if cfg.has_option(section, 'notifyplex'): autosub.NOTIFYPLEX = cfg.getboolean(section, 'notifyplex') if cfg.has_option(section, 'plexserverhost'): autosub.PLEXSERVERHOST = cfg.get(section, 'plexserverhost') if cfg.has_option(section, 'plexserverport'): autosub.PLEXSERVERPORT = cfg.get(section, 'plexserverport') if cfg.has_option(section, 'plexserverusername'): autosub.PLEXSERVERUSERNAME = cfg.get(section, 'plexserverusername') if cfg.has_option(section, 'plexserverpassword'): autosub.PLEXSERVERPASSWORD = cfg.get(section, 'plexserverpassword')
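# ReadConfig() repeats the same has_option() guard for every option. A hedged
# sketch of a helper capturing that pattern; the names here are illustrative,
# not part of AutoSub:
def read_opt(cfg, section, option, default, getter='get'):
    # Use cfg.get/getint/getboolean when the option exists, else the default.
    if cfg.has_option(section, option):
        return getattr(cfg, getter)(section, option)
    return default

# e.g. autosub.NOTIFYMAIL = read_opt(cfg, 'notify', 'notifymail', False, 'getboolean')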
CONFIG = os.path.join(os.path.dirname(__file__), "config.ini") # get the configuration items if os.path.isfile(CONFIG): config = SafeConfigParser() config.read(CONFIG) SOCKET = config.get('Milter', 'SOCKET') try: UMASK = int(config.get('Milter', 'UMASK'), base=0) except: UMASK = 0o0077 TIMEOUT = config.getint('Milter', 'TIMEOUT') MAX_FILESIZE = config.getint('Milter', 'MAX_FILESIZE') MESSAGE = config.get('Milter', 'MESSAGE') MAX_ZIP = config.getint('Milter', 'MAX_ZIP') REJECT_MESSAGE = config.getboolean('Milter', 'REJECT_MESSAGE') LOGFILE_DIR = config.get('Logging', 'LOGFILE_DIR') LOGFILE_NAME = config.get('Logging', 'LOGFILE_NAME') LOGLEVEL = config.getint('Logging', 'LOGLEVEL') else: sys.exit("Please check the config file! Config path: %s" % CONFIG) # ============================================================================= LOGFILE_PATH = os.path.join(LOGFILE_DIR, LOGFILE_NAME) HASHTABLE_PATH = os.path.join(LOGFILE_DIR, "hashtable.db") # fallback if a file type can't be detected by file magic EXTENSIONS = ".dot", ".doc", ".xls", ".docm", ".dotm", ".xlsm", ".xlsb", ".pptm", ".ppsm", ".rtf", ".mht" # Set up a specific logger with our desired output level log = logging.getLogger('MacroMilter')
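# A sketch of how LOGFILE_PATH and LOGLEVEL might feed the logger above; the
# rotating-handler choice and its maxBytes/backupCount values are assumptions,
# not taken from MacroMilter itself.
import logging.handlers

handler = logging.handlers.RotatingFileHandler(LOGFILE_PATH,
                                               maxBytes=1048576,
                                               backupCount=5)
handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s'))
log.addHandler(handler)
log.setLevel(LOGLEVEL)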
#!/usr/bin/env python from ConfigParser import SafeConfigParser config = SafeConfigParser() config.read('config.ini') # Read config.ini and store into variables HOST = config.get('app', 'HOST') PORT = int(config.get('app', 'PORT')) DEBUG = config.getboolean('app', 'DEBUG') TEMP_FILE_DIR = config.get('app', 'TEMP_FILE_DIR') SESSION_KEY = config.get('app', 'SESSION_KEY') DOMAIN_URL = config.get('app', 'DOMAIN_URL') SHORT_DOMAIN_URL = config.get('app', 'SHORT_DOMAIN_URL') ADMIN_EMAIL = config.get('app', 'ADMIN_EMAIL') EXCEPTION_EMAIL = config.get('app', 'EXCEPTION_EMAIL').split(',') # Caching Configs CACHE_TYPE = config.get('caching', 'CACHE_TYPE') CACHE_SERVER_ADDRESS = config.get('caching', 'CACHE_SERVER_ADDRESS').split() CACHE_DEFAULT_TIMEOUT = config.getint('caching', 'CACHE_DEFAULT_TIMEOUT') # Mongo Configs MONGO_DATABASE = config.get('mongodb', 'DATABASE') MONGO_HOST = config.get('mongodb', 'HOST') MONGO_PORT = int(config.get('mongodb', 'PORT')) MONGO_USER = config.get('mongodb', 'USER') MONGO_PASS = config.get('mongodb', 'PASS') # S3 Configs
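# Aside: the parser's typed getters already cover the manual coercions above;
# config.getint('app', 'PORT') equals int(config.get('app', 'PORT')), and
# getboolean() validates true/false, yes/no, on/off, 1/0. A minimal sketch
# against the same parser:
PORT = config.getint('app', 'PORT')
DEBUG = config.getboolean('app', 'DEBUG')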
def parseExptConfig(configFile, librariesToSublibrariesDict): parser = SafeConfigParser() results = parser.read(configFile) if len(results) == 0: return None, 1, 'Experiment config file not found' #output variables paramDict = dict() exitStatus = 0 warningString = '' ##check all sections expectedSections = set([ 'experiment_settings', 'library_settings', 'counts_files', 'filter_settings', 'sgrna_analysis', 'growth_values', 'gene_analysis' ]) parsedSections = set(parser.sections()) if len(expectedSections) != len(parsedSections) and len( expectedSections) != len( expectedSections.intersection(parsedSections)): return paramDict, 1, 'Config file does not have all required sections or has extraneous sections!\nExpected:' + ','.join( expectedSections) + '\nFound:' + ','.join(parsedSections) ##experiment settings if parser.has_option('experiment_settings', 'output_folder'): paramDict['output_folder'] = parser.get( 'experiment_settings', 'output_folder') #ways to check this is a valid path? else: warningString += 'No output folder specified, defaulting to current directory.\n' paramDict['output_folder'] = os.curdir # os.curdir is the string '.', not a callable if parser.has_option('experiment_settings', 'experiment_name'): paramDict['experiment_name'] = parser.get('experiment_settings', 'experiment_name') else: warningString += 'No experiment name specified, defaulting to \'placeholder_expt_name\'.\n' paramDict['experiment_name'] = 'placeholder_expt_name' ##library settings libraryDict = librariesToSublibrariesDict if parser.has_option('library_settings', 'library'): parsedLibrary = parser.get('library_settings', 'library') if parsedLibrary.lower() in libraryDict: paramDict['library'] = parsedLibrary.lower() else: warningString += 'Library name \"%s\" not recognized\n' % parsedLibrary exitStatus += 1 else: warningString += 'No library specified\n' exitStatus += 1 parsedLibrary = '' if 'library' in paramDict: if parser.has_option('library_settings', 'sublibraries'): parsedSubList = parser.get('library_settings', 'sublibraries').strip().split('\n') paramDict['sublibraries'] = [] for sub in parsedSubList: sub = sub.lower() if sub in libraryDict[paramDict['library']]: paramDict['sublibraries'].append(sub) else: warningString += 'Sublibrary %s not recognized\n' % sub else: paramDict['sublibraries'] = libraryDict[paramDict['library']] ##counts files if parser.has_option('counts_files', 'counts_file_string'): countsFileString = parser.get('counts_files', 'counts_file_string').strip() paramDict['counts_file_list'] = [] for stringLine in countsFileString.split('\n'): stringLine = stringLine.strip() if len(stringLine.split(':')) != 2 or len( stringLine.split('|')) != 2: warningString += 'counts file entry could not be parsed: ' + stringLine + '\n' exitStatus += 1 else: parsedPath = stringLine.split(':')[0] if os.path.isfile(parsedPath) == False: warningString += 'Counts file not found: ' + parsedPath + '\n' exitStatus += 1 condition, replicate = stringLine.split(':')[1].split('|') paramDict['counts_file_list'].append( (condition, replicate, parsedPath)) else: warningString += 'No counts files entered\n' exitStatus += 1 ##filter settings filterOptions = ['either', 'both'] if parser.has_option('filter_settings', 'filter_type') and parser.get( 'filter_settings', 'filter_type').lower() in filterOptions: paramDict['filter_type'] = parser.get('filter_settings', 'filter_type').lower() else: warningString += 'Filter type not set or not recognized, defaulting to \'either\'\n' paramDict['filter_type'] = 'either' if parser.has_option('filter_settings', 
'minimum_reads'): try: paramDict['minimum_reads'] = parser.getint('filter_settings', 'minimum_reads') except ValueError: warningString += 'Minimum read value not an integer, defaulting to 0\n' #recommended value is 50 but seems arbitrary to default to that paramDict['minimum_reads'] = 0 else: warningString += 'Minimum read value not found, defaulting to 0\n' #recommended value is 50 but seems arbitrary to default to that paramDict['minimum_reads'] = 0 ##sgRNA Analysis if parser.has_option('sgrna_analysis', 'condition_string'): conditionString = parser.get('sgrna_analysis', 'condition_string').strip() paramDict['condition_tuples'] = [] if 'counts_file_list' in paramDict: expectedConditions = set(zip(*paramDict['counts_file_list'])[0]) else: expectedConditions = [] enteredConditions = set() for conditionStringLine in conditionString.split('\n'): conditionStringLine = conditionStringLine.strip() if len(conditionStringLine.split(':')) != 3: warningString += 'Phenotype condition line not understood: ' + conditionStringLine + '\n' exitStatus += 1 else: phenotype, condition1, condition2 = conditionStringLine.split( ':') if condition1 not in expectedConditions or condition2 not in expectedConditions: warningString += 'One of the conditions entered does not correspond to a counts file: ' + conditionStringLine + '\n' exitStatus += 1 else: paramDict['condition_tuples'].append( (phenotype, condition1, condition2)) enteredConditions.add(condition1) enteredConditions.add(condition2) if len(paramDict['condition_tuples']) == 0: warningString += 'No phenotype score/condition pairs found\n' exitStatus += 1 unusedConditions = list(expectedConditions - enteredConditions) if len(unusedConditions) > 0: warningString += 'Some conditions assigned to counts files will not be incorporated in sgRNA analysis:\n' \ + ','.join(unusedConditions) + '\n' else: warningString += 'No phenotype score/condition pairs entered\n' exitStatus += 1 pseudocountOptions = ['zeros only', 'all values', 'filter out'] if parser.has_option( 'sgrna_analysis', 'pseudocount_behavior') and parser.get( 'sgrna_analysis', 'pseudocount_behavior').lower() in pseudocountOptions: paramDict['pseudocount_behavior'] = parser.get( 'sgrna_analysis', 'pseudocount_behavior').lower() else: warningString += 'Pseudocount behavior not set or not recognized, defaulting to \'zeros only\'\n' paramDict['pseudocount_behavior'] = 'zeros only' if parser.has_option('sgrna_analysis', 'pseudocount'): try: paramDict['pseudocount'] = parser.getfloat('sgrna_analysis', 'pseudocount') except ValueError: warningString += 'Pseudocount value not a number, defaulting to 0.1\n' paramDict['pseudocount'] = 0.1 else: warningString += 'Pseudocount value not found, defaulting to 0.1\n' paramDict['pseudocount'] = 0.1 ##Growth Values if parser.has_option('growth_values', 'growth_value_string') and len( parser.get('growth_values', 'growth_value_string').strip()) != 0: growthValueString = parser.get('growth_values', 'growth_value_string').strip() if 'condition_tuples' in paramDict and 'counts_file_list' in paramDict: expectedComparisons = set(zip(*paramDict['condition_tuples'])[0]) expectedReplicates = set(zip(*paramDict['counts_file_list'])[1]) expectedTupleList = [] for comp in expectedComparisons: for rep in expectedReplicates: expectedTupleList.append((comp, rep)) else: expectedTupleList = [] enteredTupleList = [] growthValueTuples = [] for growthValueLine in growthValueString.split('\n'): growthValueLine = growthValueLine.strip() linesplit = growthValueLine.split(':') if 
len(linesplit) != 3: warningString += 'Growth value line not understood: ' + growthValueLine + '\n' exitStatus += 1 continue comparison = linesplit[0] replicate = linesplit[1] try: growthVal = float(linesplit[2]) except ValueError: warningString += 'Growth value not a number: ' + growthValueLine + '\n' exitStatus += 1 continue curTup = (comparison, replicate) if curTup in expectedTupleList: if curTup not in enteredTupleList: enteredTupleList.append(curTup) growthValueTuples.append( (comparison, replicate, growthVal)) else: warningString += ':'.join( curTup) + ' has multiple growth values entered\n' exitStatus += 1 else: warningString += ':'.join( curTup ) + ' was not expected given the specified counts file assignments and sgRNA phenotypes\n' exitStatus += 1 #because we enforced no duplicates or unexpected values these should match up unless there were values not entered #require all growth values to be explicitly entered if some were if len(enteredTupleList) != len(expectedTupleList): warningString += 'Growth values were not entered for all expected comparisons/replicates. Expected: ' + \ ','.join([':'.join(tup) for tup in expectedTupleList]) + '\nEntered: ' + \ ','.join([':'.join(tup) for tup in enteredTupleList]) + '\n' exitStatus += 1 else: paramDict['growth_value_tuples'] = growthValueTuples else: warningString += 'No growth values--all phenotypes will be reported as log2 enrichments\n' paramDict['growth_value_tuples'] = [] if 'condition_tuples' in paramDict and 'counts_file_list' in paramDict: expectedComparisons = set(zip(*paramDict['condition_tuples'])[0]) expectedReplicates = set(zip(*paramDict['counts_file_list'])[1]) for comp in expectedComparisons: for rep in expectedReplicates: paramDict['growth_value_tuples'].append((comp, rep, 1)) ##Gene Analysis if parser.has_option('gene_analysis', 'collapse_to_transcripts'): try: paramDict['collapse_to_transcripts'] = parser.getboolean( 'gene_analysis', 'collapse_to_transcripts') except ValueError: warningString += 'Collapse to transcripts entry not a recognized boolean value\n' exitStatus += 1 else: paramDict['collapse_to_transcripts'] = True warningString += 'Collapse to transcripts defaulting to True\n' #pseudogene parameters if parser.has_option('gene_analysis', 'generate_pseudogene_dist'): paramDict['generate_pseudogene_dist'] = parser.get( 'gene_analysis', 'generate_pseudogene_dist').lower() if paramDict['generate_pseudogene_dist'] not in [ 'auto', 'manual', 'off' ]: warningString += 'Generate pseudogene dist entry not a recognized option\n' exitStatus += 1 else: paramDict['generate_pseudogene_dist'] = False warningString += 'Generate pseudogene dist defaulting to False\n' if 'generate_pseudogene_dist' in paramDict and paramDict[ 'generate_pseudogene_dist'] == 'manual': if parser.has_option('gene_analysis', 'pseudogene_size'): try: paramDict['pseudogene_size'] = parser.getint( 'gene_analysis', 'pseudogene_size') except ValueError: warningString += 'Pseudogene size entry not a recognized integer value\n' exitStatus += 1 else: warningString += 'No pseudogene size provided\n' exitStatus += 1 if parser.has_option('gene_analysis', 'num_pseudogenes'): try: paramDict['num_pseudogenes'] = parser.getint( 'gene_analysis', 'num_pseudogenes') except ValueError: warningString += 'Pseudogene number entry not a recognized integer value\n' exitStatus += 1 else: warningString += 'No pseudogene number provided\n' #list possible analyses in param dict as dictionary with keys = analysis and values = analysis-specific params paramDict['analyses'] = dict() 
#analyze by average of best n if parser.has_option('gene_analysis', 'calculate_ave'): try: if parser.getboolean('gene_analysis', 'calculate_ave') == True: paramDict['analyses']['calculate_ave'] = [] except ValueError: warningString += 'Calculate ave entry not a recognized boolean value\n' exitStatus += 1 if 'calculate_ave' in paramDict['analyses']: if parser.has_option('gene_analysis', 'best_n'): try: paramDict['analyses']['calculate_ave'].append( parser.getint('gene_analysis', 'best_n')) except ValueError: warningString += 'Best_n entry not a recognized integer value\n' exitStatus += 1 else: warningString += 'No best_n value provided for average analysis function\n' exitStatus += 1 else: warningString += 'Best n average analysis not specified, defaulting to False\n' #analyze by Mann-Whitney if parser.has_option('gene_analysis', 'calculate_mw'): try: if parser.getboolean('gene_analysis', 'calculate_mw') == True: paramDict['analyses']['calculate_mw'] = [] except ValueError: warningString += 'Calculate Mann-Whitney entry not a recognized boolean value\n' exitStatus += 1 #analyze by K-S, skipping for now #analyze by nth best sgRNA if parser.has_option('gene_analysis', 'calculate_nth'): try: if parser.getboolean('gene_analysis', 'calculate_nth') == True: paramDict['analyses']['calculate_nth'] = [] except ValueError: warningString += 'Calculate best Nth sgRNA entry not a recognized boolean value\n' exitStatus += 1 if 'calculate_nth' in paramDict['analyses']: if parser.has_option('gene_analysis', 'nth'): try: paramDict['analyses']['calculate_nth'].append( parser.getint('gene_analysis', 'nth')) except ValueError: warningString += 'Nth best sgRNA entry not a recognized integer value\n' exitStatus += 1 else: warningString += 'No Nth best value provided for that analysis function\n' exitStatus += 1 else: warningString += 'Nth best sgRNA analysis not specified, defaulting to False\n' if len(paramDict['analyses']) == 0: warningString += 'No analyses selected to compute gene scores\n' #should this raise exitStatus? return paramDict, exitStatus, warningString
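# A hedged usage sketch of parseExptConfig(); the library/sublibrary mapping
# and the config path are hypothetical placeholders.
import sys

librariesToSublibrariesDict = {'example_library': ['sublib_a', 'sublib_b']}
paramDict, exitStatus, warningString = parseExptConfig(
    'experiment_config.txt', librariesToSublibrariesDict)
if warningString:
    sys.stderr.write(warningString)
if exitStatus != 0:
    sys.exit('Experiment config did not validate; see warnings above')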
'--config', default='config.ini', help='Config file to be used. Default: "config.ini"') args = arg_parser.parse_args() # Parse config parameters config_file = args.config if not os.path.exists(config_file): print '\n**Config file %s not found. Exiting.' % config_file sys.exit(1) conf_parser = SafeConfigParser() conf_parser.read(config_file) OBIEE_VERSION = conf_parser.get('OBIEE', 'OBIEE_VERSION') CLIENT_ONLY = conf_parser.getboolean('OBIEE', 'CLIENT_ONLY') OBIEE_HOME = os.path.abspath(conf_parser.get('OBIEE', 'OBIEE_HOME')) # Optional path setting for clients and server and mixed installations # Read the raw value first: os.path.abspath('') resolves to the current directory and would defeat the empty-string checks below OBIEE_CLIENT = conf_parser.get('OBIEE', 'OBIEE_CLIENT') if CLIENT_ONLY is False and OBIEE_CLIENT == '': OBIEE_CLIENT = os.path.join(OBIEE_HOME, 'user_projects', 'domains') elif CLIENT_ONLY is True and OBIEE_CLIENT == '': OBIEE_CLIENT = OBIEE_HOME else: OBIEE_CLIENT = os.path.abspath(OBIEE_CLIENT) RPD_PW = conf_parser.get('OBIEE', 'RPD_PW') # Initialises bi-init and runcat command variables
class PartyCrasher(object): def __init__(self, config_file=None): self.config = ConfigParser(default_config()) self._checked_index_exists = False # TODO: Abstract config out. if config_file is not None: self.config.readfp(config_file) self.thresholds = (self.config.get('partycrasher.bucket', 'thresholds').split()) # self.es and self.bucketer are lazy properties. self._es = None self._bucketer = None @property def es_servers(self): """ Configured ES server list """ return self.config.get('partycrasher.elastic', 'hosts').split() @property def es_index(self): """ Configured ES index base name """ return self.config.get('partycrasher.elastic', 'indexbase') @property def allow_delete_all(self): """ Whether or not the instance should allow all data to be deleted at once """ return self.config.getboolean('partycrasher.elastic', 'allow_delete_all') @property def es(self): """ ElasticSearch instance. """ if not self._es: self._connect_to_elasticsearch() return self._es @property def bucketer(self): """ Bucketer instance. """ if not self._bucketer: self._connect_to_elasticsearch() return self._bucketer @property def default_threshold(self): """ Default threshold to use if none are provided. """ # TODO: determine from static/dynamic configuration return Threshold( self.config.get('partycrasher.bucket', 'default_threshold')) def delete_and_recreate_index(self): """ Deletes the entire index and recreates it. This destroys all of the reports. """ assert self.allow_delete_all self.es.indices.delete(index=self.es_index) self.es.cluster.health(wait_for_status='yellow') self._bucketer.create_index() self.es.cluster.health(wait_for_status='yellow') def _connect_to_elasticsearch(self): """ Establishes a connection to ElasticSearch, given the configuration. """ self._es = Elasticsearch( self.es_servers, retry_on_timeout=True, ) # XXX: Monkey-patch our instance to the global. ESCrash.es = self._es tokenization_name = self.config.get('partycrasher.bucket', 'tokenization') print("Using bucketer: %s" % (tokenization_name), file=sys.stderr) tokenization = locate(tokenization_name) self._bucketer = tokenization(thresholds=self.thresholds, lowercase=False, index=self.es_index, elasticsearch=self.es, config=self.config) if not self._checked_index_exists: if self._es.indices.exists(self.es_index): self._checked_index_exists = True else: self._bucketer.create_index() self.es.cluster.health(wait_for_status='yellow') return self._es def ingest(self, crash, dryrun=False): """ Ingest a crash; the Crash may be a simple dictionary, or a pre-existing Crash instance. :return: the saved crash :rtype Crash: :raises IdenticalReportError: """ true_crash = Crash(crash) if 'stacktrace' in true_crash: assert isinstance(true_crash['stacktrace'], Stacktrace) assert isinstance(true_crash['stacktrace'][0], Stackframe) if 'address' in true_crash['stacktrace'][0]: assert isinstance(true_crash['stacktrace'][0]['address'], basestring) if dryrun: true_crash['buckets'] = self.bucketer.assign_buckets(true_crash) return true_crash else: return self.bucketer.assign_save_buckets(true_crash) def get_bucket(self, threshold, bucket_id, project=None, from_=None, size=None): """ Returns information for the given bucket. """ # Coerce to a Threshold object. threshold = Threshold(threshold) query = { "query": { "constant_score": { "filter": { "term": { "buckets." 
+ threshold.to_elasticsearch(): bucket_id } } } }, "sort": { "date": { "order": "desc" } }, #"aggregations": { #"significant": { #"significant_terms": { #"field": "_all", #"mutual_information": {}, #"size": 100 #} #} #} } if from_ is not None: query["from"] = from_ query["size"] = size response = self.es.search(body=query, index=self.es_index) with open('bucket_response', 'wb') as debug_file: print(json.dumps(response, indent=2), file=debug_file) reports_found = response['hits']['total'] # Since no reports were found, assume the bucket does not exist (at # least for this project). if reports_found < 1: raise BucketNotFoundError(bucket_id) reports = get_reports_by_bucket(response, threshold).get(bucket_id) assert reports return Bucket(id=bucket_id, project=project, threshold=threshold, total=reports_found, top_reports=reports, first_seen=None) def top_buckets(self, lower_bound, threshold=None, project=None, from_=None, size=None, upper_bound=None, query_string=None): """ Given a datetime lower_bound (from date), calculates the top buckets in the given timeframe for the given threshold (automatically determined if not given). The results can be tailored for a specific project if needed. Returns a list of {'doc_count': int, 'key': id} dictionaries. """ if not isinstance(lower_bound, datetime): raise TypeError('The lower bound MUST be a datetime object.') # Get the default threshold. if threshold is None: threshold = self.default_threshold if not isinstance(threshold, Threshold): threshold = Threshold(threshold) # Filters by lower-bound by default; filters = [{"range": {"date": {"gt": lower_bound.isoformat()}}}] if upper_bound is not None: filters[0]["range"]["date"]["lt"] = upper_bound.isoformat() # May filter optionally by project name. if project is not None: filters.append({"term": {"project": project}}) # this doesn't work on ES 2.3! if query_string is not None: print("Query string!", file=sys.stderr) filters.append({ "query": { "query_string": { "query": query_string, "default_operator": "AND", } } }) # Oh, ElasticSearch! You and your verbose query "syntax"! query = { # Read this inside out: "aggs": { "top_buckets_filtered": { # Filter the top buckets by date, and maybe by project. "filter": { "bool": { "must": filters } }, # Get the top buckets in descending order of size. "aggs": { "top_buckets": { "terms": { "field": "buckets." + threshold.to_elasticsearch(), "order": { "_count": "desc" }, }, # Get the date of the earliest crash per bucket. "aggs": { "first_seen": { "min": { "field": "date" } } } } } } }, # Do not send any hits back! "size": 0 } if size is None: size = 10 actual_size = size if from_ is not None: assert from_ >= 0 actual_size = actual_size + from_ if size is not None: assert size >= 0 (query["aggs"]["top_buckets_filtered"]["aggs"]["top_buckets"] ["terms"]["size"]) = actual_size try: response = self.es.search(body=query, index=self.es_index) except RequestError as e: print(e.error, file=sys.stderr) raise e # Oh, ElasticSearch! You and your verbose responses! 
top_buckets = (response['aggregations']['top_buckets_filtered'] ['top_buckets']['buckets']) if from_ is not None: top_buckets = top_buckets[from_:] return [ Bucket(id=bucket['key'], project=project, threshold=threshold, total=bucket['doc_count'], first_seen=bucket['first_seen']['value_as_string'], top_reports=None) for bucket in top_buckets ] def get_crash(self, database_id, project): self._connect_to_elasticsearch() crash = None try: crash = ESCrash(database_id, index=self.es_index) except NotFoundError as e: raise KeyError(database_id) response = self.es.termvectors(index=self.es_index, doc_type='crash', id=database_id, fields='stacktrace.function.whole', term_statistics=True, offsets=False, positions=False) #with open('termvectors', 'wb') as termvectorsfile: #print(json.dumps(response, indent=2), file=termvectorsfile) if 'stacktrace.function.whole' in response['term_vectors']: vectors = response['term_vectors']['stacktrace.function.whole'] all_doc_count = float(vectors['field_statistics']['doc_count']) crash = Crash(crash) # Sometimes there's extra functions on top of the stack for # logging/cleanup/handling/rethrowing/whatever that get called # after the fault but before the trace is generated, and are # present for multiple crash locations. So except on the # full detail page, we don't want to display them. # This is for that. for frame in crash['stacktrace']: if 'function' in frame and frame['function']: function = frame['function'] term = vectors['terms'][function] relativedf = float(term['doc_freq']) / all_doc_count logdf = -1.0 * math.log(relativedf, 2) #print(logdf, file=sys.stderr) frame['logdf'] = logdf return crash def get_summary(self, database_id, project): self._connect_to_elasticsearch() try: return self.bucketer.bucket_explain(database_id) except NotFoundError as e: raise KeyError(database_id) def compare(self, database_id, other_ids): self._connect_to_elasticsearch() try: return self.bucketer.compare(database_id, other_ids) except NotFoundError as e: raise KeyError(database_id) def get_projects(self): """ Returns the list of all projects found in Elasticsearch. """ query = {"aggs": {"projects": {"terms": {"field": "project"}}}} try: results = self.es.search(body=query, index=self.es_index) except TransportError: # Occurs when the index has just been freshly created. return None raw_projects = results['aggregations']['projects']['buckets'] return [Project(project['key']) for project in raw_projects] def ensure_index_created(self): """ Ensure that the index exists. """ self._connect_to_elasticsearch() return self def search(self, query_string, since=None, until=None, project=None, from_=None, size=None, sort=None, order=None): es_query = { "query": { "bool": { "must": [ { "query_string": { "query": query_string, # This is necessary due to how we tokenize things # which is not on whitespace I.E. 
if the user # searched for CamelCaseThing it will be interpreted # as a search for Camel AND Case AND Thing rather # than Camel OR Case OR Thing "default_operator": "AND", } }, ] } }, } if sort is not None: if order is None: order = "desc" es_query["sort"] = [{sort: {"order": order}}] if project is not None: es_query['query']['bool']['must'].append( {"term": { "project": project }}) if (since is not None) or (until is not None): date_bounds = {} if since is not None: date_bounds['gt'] = since.isoformat() if until is not None: date_bounds['lt'] = until.isoformat() es_query['query']['bool']['must'].append( {"range": { "date": date_bounds }}) if from_ is not None: es_query["from"] = from_ if size is not None: es_query["size"] = size try: r = self.es.search(index=self.es_index, body=es_query) except RequestError as e: # TODO: use logger print(e.info, file=sys.stderr) raise except TransportError as e: # TODO: use logger print(e.info, file=sys.stderr) raise raw_hits = r['hits']['hits'] #print(json.dumps(raw_hits, indent=2), file=sys.stderr) results = [] for hit in raw_hits: report = hit['_source'] crash = Crash(report) results.append(crash) return results
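# A hedged usage sketch of the class above; the config file name is
# hypothetical, and ensure_index_created() chains as defined earlier.
from datetime import datetime, timedelta

with open('partycrasher.cfg') as config_file:
    pc = PartyCrasher(config_file).ensure_index_created()
last_week = pc.top_buckets(datetime.utcnow() - timedelta(days=7), size=10)
segfaults = pc.search('SIGSEGV', size=20)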
CONFIG = os.path.join(os.path.dirname(__file__), "config.ini") # get the configuration items if os.path.isfile(CONFIG): config = SafeConfigParser() config.read(CONFIG) SOCKET = config.get('Milter', 'SOCKET') try: UMASK = int(config.get('Milter', 'UMASK'), base=0) except: UMASK = 0o0077 TIMEOUT = config.getint('Milter', 'TIMEOUT') MAX_FILESIZE = config.getint('Milter', 'MAX_FILESIZE') MESSAGE = config.get('Milter', 'MESSAGE') MAX_ZIP = config.getint('Milter', 'MAX_ZIP') REJECT_MESSAGE = config.getboolean('Milter', 'REJECT_MESSAGE') LOGFILE_DIR = config.get('Logging', 'LOGFILE_DIR') LOGFILE_NAME = config.get('Logging', 'LOGFILE_NAME') LOGLEVEL = config.getint('Logging', 'LOGLEVEL') # DUMP_BODY is optional; getboolean() raises NoOptionError rather than returning None, so guard with has_option() and default to False DUMP_BODY = config.getboolean('Milter', 'DUMP_BODY') if config.has_option('Milter', 'DUMP_BODY') else False else: sys.exit("Please check the config file! Config path: %s" % CONFIG) # ============================================================================= LOGFILE_PATH = os.path.join(LOGFILE_DIR, LOGFILE_NAME) HASHTABLE_PATH = os.path.join(LOGFILE_DIR, "hashtable.db")
def get_config(args=None): class NotSet: pass notset = NotSet() parser = ArgumentParser(description="Analyze a given position.") parser.add_argument("-b", "--bot", help="Which engine to use in config file") parser.add_argument("-c", "--config", default="analyze.cfg", help="Configuration file to use.") parser.add_argument("--log", help="Set log output level.") parser.add_argument("--strict-checks", action="store_true", default=notset, help="Use strict checking on move legality") parser.add_argument("--skip-checks", action="store_false", dest="strict_checks", help="Skip extra legality checks for moves") parser.add_argument( "--strict-setup", action="store_true", default=notset, help="Require the setup moves to be complete and legal") parser.add_argument( "--allow-setup", dest="strict_setup", action="store_false", help="Allow incomplete or otherwise illegal setup moves") parser.add_argument("position_file", help="File with board or move list") parser.add_argument("move_number", help="Move to analyze", nargs="?") args = parser.parse_args(args) config = SafeConfigParser() if config.read(args.config) != [args.config]: print "Could not open '%s'" % (args.config, ) sys.exit(1) try: loglevel = config.get("global", "log_level") except NoOptionError: loglevel = None loglevel = loglevel if args.log is None else args.log if loglevel is not None: loglevel = logging.getLevelName(loglevel) if not isinstance(loglevel, int): print "Bad log level \"%s\", use ERROR, WARNING, INFO or DEBUG." % ( loglevel, ) sys.exit(1) logging.basicConfig(level=loglevel) if args.strict_checks is notset: try: args.strict_checks = config.getboolean("global", "strict_checks") except NoOptionError: args.strict_checks = False if args.strict_setup is notset: try: args.strict_setup = config.getboolean("global", "strict_setup") except NoOptionError: args.strict_setup = None try: args.search_position = config.getboolean("global", "search_position") except NoOptionError: args.search_position = True if args.bot is None: args.bot = config.get("global", "default_engine") cfg_sections = config.sections() if args.bot not in cfg_sections: print "Engine configuration for %s not found in config." % (args.bot, ) print "Available configs are:", for section in cfg_sections: if section != "global": print section, print sys.exit(1) try: args.com_method = config.get(args.bot, "communication_method").lower() except NoOptionError: args.com_method = "stdio" try: args.enginecmd = config.get(args.bot, "cmdline") except NoOptionError: print "No engine command line found in config file." print "Add cmdline option for engine %s" % (args.bot, ) sys.exit(1) args.bot_options = list() for option in config.options(args.bot): if option.startswith("bot_"): value = config.get(args.bot, option) args.bot_options.append((option[4:], value)) args.post_options = list() for option in config.options(args.bot): if option.startswith("post_pos_"): value = config.get(args.bot, option) args.post_options.append((option[9:], value)) return args
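# A minimal sketch of the analyze.cfg layout get_config() reads; the engine
# name, command line and option values are hypothetical, but every key and
# the bot_/post_pos_ prefixes come from the parsing code above.
SAMPLE_ANALYZE_CFG = """\
[global]
default_engine = mybot
log_level = INFO
strict_checks = false
strict_setup = false
search_position = true

[mybot]
communication_method = stdio
cmdline = ./mybot --analyze
bot_hash = 512
post_pos_depth = 12
"""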
'akismet_api_key': None, 'facebook_token': None, } CONFIG_FILE = os.environ.get('JARDINFAQ_CONFIG_FILE', os.path.join(PROJECT_ROOT, 'jardinfaq.cfg')) CONFIG = SafeConfigParser(defaults=DEFAULT_CONFIG, allow_no_value=True) CONFIG.read([ CONFIG_FILE, '/etc/jardinfaq.cfg', os.path.expanduser('~/.jardinfaq.cfg') ]) map(lambda i: CONFIG.set('DEFAULT', i[0], i[1]), ENV_VARS.items()) site.addsitedir(os.path.join(ASKBOT_ROOT, 'deps')) DEBUG = CONFIG.getboolean('DEFAULT', 'debug') TEMPLATE_DEBUG = CONFIG.getboolean('DEFAULT', 'template_debug') ALLOWED_HOSTS = CONFIG.get('DEFAULT', 'allowed_hosts').split(',') INTERNAL_IPS = CONFIG.get('DEFAULT', 'internal_ips').split(',') SITE_ID = 1 SECRET_KEY = CONFIG.get('DEFAULT', 'secret_key') # i18n LANGUAGE_CODE = 'fr' LANGUAGES = (('fr', 'French'), ) TIME_ZONE = 'Europe/Paris' USE_I18N = True USE_L10N = True USE_TZ = True ASKBOT_LANGUAGE_MODE = 'single-lang'
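# ConfigParser makes [DEFAULT] entries visible from every section, which is
# what lets the settings above be read off the 'DEFAULT' section directly.
# A standalone sketch:
from ConfigParser import SafeConfigParser

demo = SafeConfigParser(defaults={'debug': 'false'})
demo.add_section('site')
assert demo.getboolean('site', 'debug') is False     # falls back to DEFAULT
assert demo.getboolean('DEFAULT', 'debug') is False  # read directly, as above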
config = SafeConfigParser() config.read('config.ini') if config.has_section('hmeserver'): for opt, value in config.items('hmeserver'): if opt == 'apps': config_apps = value.split() elif opt == 'address': host = value elif opt == 'port': port = int(value) elif opt == 'basepath': app_root = value elif opt == 'datapath': data_root = value elif opt == 'zeroconf': have_zc = config.getboolean('hmeserver', 'zeroconf') try: opts, apps = getopt.getopt(sys.argv[1:], 'a:p:b:d:zvh', [ 'address=', 'port=', 'basepath=', 'datapath=', 'nozeroconf', 'version', 'help' ]) except getopt.GetoptError, msg: print msg for opt, value in opts: if opt in ('-a', '--address'): host = value elif opt in ('-p', '--port'): port = int(value) elif opt in ('-b', '--basepath'):
logging.basicConfig(format='%(message)s', level=logging.INFO) if len(sys.argv) == 1: logging.critical("No config file specified") sys.exit(1) vibrating = False appliance_active = False last_vibration_time = time.time() start_vibration_time = last_vibration_time config = SafeConfigParser() config.read(sys.argv[1]) verbose = config.getboolean('main', 'VERBOSE') sensor_pin = config.getint('main', 'SENSOR_PIN') begin_seconds = config.getint('main', 'SECONDS_TO_START') end_seconds = config.getint('main', 'SECONDS_TO_END') pushbullet_api_key = config.get('pushbullet', 'API_KEY') msg_period = config.getint('main', 'TIME_BETWEEN_EACH_MESSAGE') pushover_user_key = config.get('pushover', 'user_api_key') pushover_app_key = config.get('pushover', 'app_api_key') pushover_device = config.get('pushover', 'device') pushover_sound = config.get('pushover', 'sound') mqtt_hostname = config.get('mqtt', 'mqtt_hostname') mqtt_port = config.getint('mqtt', 'mqtt_port') # read as an int like the other numeric options; MQTT clients expect a numeric port mqtt_topic = config.get('mqtt', 'mqtt_topic')
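# A sketch of the INI layout this script expects; every section and key
# appears in the reads above, all values are hypothetical.
SAMPLE_SENSOR_INI = """\
[main]
VERBOSE = true
SENSOR_PIN = 14
SECONDS_TO_START = 6
SECONDS_TO_END = 30
TIME_BETWEEN_EACH_MESSAGE = 600

[pushbullet]
API_KEY = your-pushbullet-key

[pushover]
user_api_key = your-user-key
app_api_key = your-app-key
device = phone
sound = pushover

[mqtt]
mqtt_hostname = localhost
mqtt_port = 1883
mqtt_topic = appliance/washer
"""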
class GlobalSettings(Signallable): """ Global PiTiVi settings. The settings object loads settings from three different sources: the global configuration, the local configuration file, and the environment. Modules declare which settings they wish to access by calling the addConfigOption() class method during initialization. @cvar options: A dictionary of available settings. @cvar environment: A list of the controlled environment variables. """ options = {} environment = set() defaults = {} __signals__ = {} def __init__(self, **kwargs): Signallable.__init__(self) self._config = SafeConfigParser() self._readSettingsFromGlobalConfiguration() self._readSettingsFromConfigurationFile() self._readSettingsFromEnvironmentVariables() def _readSettingsFromGlobalConfiguration(self): # ideally, this should read settings from GConf for example pass def _readSettingsFromConfigurationFile(self): # This reads the configuration from the user configuration file try: pitivi_path = self.get_local_settings_path() pitivi_conf_file_path = os.path.join(pitivi_path, "pitivi.conf") self._config.read(pitivi_conf_file_path) except ParsingError: return for (section, attrname, typ, key, env, value) in self.iterAllOptions(): if not self._config.has_section(section): continue if key and self._config.has_option(section, key): if typ == int or typ == long: # WARNING/FIXME : This try/except is for a small cockup in previous # configurations where we stored a float value... but declared it # as an integer. try: value = self._config.getint(section, key) except ValueError: value = int(self._config.getfloat(section, key)) elif typ == float: value = self._config.getfloat(section, key) elif typ == bool: value = self._config.getboolean(section, key) else: value = self._config.get(section, key) setattr(self, attrname, value) def _readSettingsFromEnvironmentVariables(self): for (section, attrname, typ, key, env, value) in self.iterAllOptions(): var = get_env_by_type(typ, env) if var is not None: setattr(self, attrname, value) def _writeSettingsToConfigurationFile(self): pitivi_path = self.get_local_settings_path() pitivi_conf_file_path = os.path.join(pitivi_path, "pitivi.conf") for (section, attrname, typ, key, env_var, value) in self.iterAllOptions(): if not self._config.has_section(section): self._config.add_section(section) if key: if value is not None: self._config.set(section, key, str(value)) else: self._config.remove_option(section, key) try: file = open(pitivi_conf_file_path, 'w') except (IOError, OSError): return self._config.write(file) file.close()
class GlobalSettings(Signallable): """ Global PiTiVi settings. The settings object loads settings from three different sources: the global configuration, the local configuration file, and the environment. Modules declare which settings they wish to access by calling the addConfigOption() class method during initialization. @cvar options: A dictionary of available settings. @cvar environment: A list of the controlled environment variables. """ options = {} environment = set() defaults = {} __signals__ = {} def __init__(self, **kwargs): Signallable.__init__(self) self._config = SafeConfigParser() self._readSettingsFromGlobalConfiguration() self._readSettingsFromConfigurationFile() self._readSettingsFromEnvironmentVariables() def _readSettingsFromGlobalConfiguration(self): # ideally, this should read settings from GConf for example pass def _readSettingsFromConfigurationFile(self): # This reads the configuration from the user configuration file try: conf_file_path = os.path.join(xdg_config_home(), "pitivi.conf") self._config.read(conf_file_path) except ParsingError: return for (section, attrname, typ, key, env, value) in self.iterAllOptions(): if not self._config.has_section(section): continue if key and self._config.has_option(section, key): if typ == int or typ == long: try: value = self._config.getint(section, key) except ValueError: # In previous configurations we incorrectly stored # ints using float values. value = int(self._config.getfloat(section, key)) elif typ == float: value = self._config.getfloat(section, key) elif typ == bool: value = self._config.getboolean(section, key) else: value = self._config.get(section, key) setattr(self, attrname, value) @classmethod def readSettingSectionFromFile(cls, section): """ Force reading a particular section of the settings file. Use this if you dynamically determine settings sections/keys at runtime (like in tabsmanager.py). Otherwise, the settings file would be read only once (at the initialization phase of your module) and your config sections would never be read, and thus values would be reset to defaults on every startup because GlobalSettings would think they don't exist. """ if cls._config.has_section(section): for option in cls._config.options(section): # We don't know the value type in advance, just try them all. try: value = cls._config.getfloat(section, option) except: try: value = cls._config.getint(section, option) except: try: value = cls._config.getboolean(section, option) except: value = cls._config.get(section, option) setattr(cls, section + option, value) def _readSettingsFromEnvironmentVariables(self): for (section, attrname, typ, key, env, value) in self.iterAllOptions(): var = get_env_by_type(typ, env) if var is not None: setattr(self, attrname, value) def _writeSettingsToConfigurationFile(self): conf_file_path = os.path.join(xdg_config_home(), "pitivi.conf") for (section, attrname, typ, key, env_var, value) in self.iterAllOptions(): if not self._config.has_section(section): self._config.add_section(section) if key: if value is not None: self._config.set(section, key, str(value)) else: self._config.remove_option(section, key) try: file = open(conf_file_path, 'w') except (IOError, OSError): return self._config.write(file) file.close()
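# A standalone sketch of the type dispatch both GlobalSettings variants use in
# _readSettingsFromConfigurationFile(); the function name is an illustration,
# not PiTiVi API.
def read_typed(config, section, key, typ):
    # Pick the parser getter that matches the declared option type.
    if typ is int or typ is long:
        return config.getint(section, key)
    elif typ is float:
        return config.getfloat(section, key)
    elif typ is bool:
        return config.getboolean(section, key)
    return config.get(section, key)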
class AudioJackGUI(object): def __init__(self, master): self.stop_cb_check = False self.master = master self.font = ('Segoe UI', 10) self.master.minsize(width=800, height=600) self.canvas = Canvas(self.master, bd=0, highlightthickness=0) self.mainframe = ttk.Frame(self.canvas) self.scrollbar = Scrollbar(self.master, orient='vertical', command=self.canvas.yview) self.canvas.configure(yscrollcommand=self.scrollbar.set) self.scrollbar.pack(side=RIGHT, fill=Y) self.canvas.create_window((0, 0), window=self.mainframe, anchor=N, tags='self.mainframe') self.mainframe.bind('<Configure>', self.configure) self.mainframe.pack(side=TOP, fill=X) self.canvas.pack(side=TOP, fill=BOTH, expand=1) self.footer = Frame(self.master, bg='#ddd') self.credits = Label(self.footer, text='AudioJack v0.4.0', font=('Segoe UI', 14), bg='#ddd') # Use Tkinter label because ttk does not make it easy to change colors. self.support_link = Label(self.footer, text='Support', font=('Segoe UI', 14), fg='#167ac6', bg='#ddd') self.support_link.bind('<Enter>', lambda event: self.enter_link(self.support_link)) self.support_link.bind('<Button-1>', self.open_url) self.support_link.bind('<Leave>', lambda event: self.leave_link(self.support_link)) self.settings_link = Label(self.footer, text='Settings', font=('Segoe UI', 14), fg='#167ac6', bg='#ddd') self.settings_link.bind('<Enter>', lambda event: self.enter_link(self.settings_link)) self.settings_link.bind('<Button-1>', self.open_settings) self.settings_link.bind('<Leave>', lambda event: self.leave_link(self.settings_link)) self.credits.pack(side=LEFT) self.support_link.pack(side=RIGHT) self.settings_link.pack(side=RIGHT, padx=10) self.footer.pack(side=BOTTOM, fill=X) self.canvas.bind_all('<MouseWheel>', self.scroll) self.title = ttk.Label(self.mainframe, text='AudioJack', font=('Segoe UI', 24)) self.title.pack() self.url = ttk.Label(self.mainframe, text='Enter a YouTube or SoundCloud URL below.', font=self.font) self.url.pack() self.url_input = Text(self.mainframe, width=40, height=1, font=self.font, wrap=NONE) self.url_input.bind('<Tab>', focus_next_window) self.url_input.bind('<Return>', self.search) self.url_input.bind('<Control-Key-a>', self.select_all) self.url_input.bind('<Control-Key-A>', self.select_all) self.url_input.pack() self.submit = ttk.Button(self.mainframe, text='Go!', command=self.search) self.submit.pack() self.new_cb = '' self.old_cb = pyperclip.paste() if not os.path.isfile('settings.ini'): self.make_new_config() else: self.config_file = open('settings.ini', 'r+') self.config = SafeConfigParser() self.config.read('settings.ini') if self.config.getboolean('main', 'auto_cb_grab'): self.stop_cb_check = False else: self.stop_cb_check = True def configure(self, e): self.canvas.configure(scrollregion=self.canvas.bbox('all')) def make_new_config(self): self.config_file = open('settings.ini', 'w') self.config = SafeConfigParser() self.config.read('settings.ini') self.config.add_section('main') self.config.set('main', 'download_path', '%s\Downloads' % os.path.expanduser('~')) self.config.set('main', 'auto_cb_grab', 'True') with open('settings.ini', 'w') as file: self.config.write(file) def scroll(self, e): # TODO: Fix scrolling if self.mainframe.winfo_height() > self.master.winfo_height(): self.canvas.yview_scroll(-1 * (e.delta / 30), 'units') def enter_link(self, widget): widget.configure(cursor='hand2', font=('Segoe UI', 14, 'underline')) def open_url(self, e): webbrowser.open('http://blue9.github.io/AudioJack-GUI/', autoraise=True) def leave_link(self, widget): 
widget.configure(cursor='arrow', font=('Segoe UI', 14)) def open_settings(self, e): self.settings_window = Toplevel(self.mainframe, height=50) self.settings_window.title('AudioJack-GUI v0.4.0 - Settings') self.settings_window.iconbitmap('AudioJack Icon.ico') Label(self.settings_window, text='Download path for music:').grid(row=0, column=0, padx=10, pady=10) self.download_path_input = Text(self.settings_window, width=50, height=1) self.download_path_input.grid(row=0, column=1, padx=10, pady=10) self.download_path_input.insert(INSERT, self.config.get('main', 'download_path')) Button(self.settings_window, text='Browse...', command=self.get_folder_path).grid(row=0, column=2, padx=10, pady=10) Label(self.settings_window, text='Auto Clipboard Paste ').grid(row=1, column=0, padx=10, pady=10) self.cb_var = IntVar() self.auto_cb_grab_box = Checkbutton(self.settings_window, variable=self.cb_var) if self.config.getboolean('main', 'auto_cb_grab'): self.auto_cb_grab_box.select() else: self.auto_cb_grab_box.deselect() self.auto_cb_grab_box.grid(row=1, column=1, sticky=W, padx=10, pady=10) self.buttons_frame = Frame(self.settings_window) self.buttons_frame.grid(row=5, column=1, padx=10, pady=10, sticky=S) ttk.Button(self.buttons_frame, text='OK', command=self.save_settings).pack(side=RIGHT) ttk.Button(self.buttons_frame, text='Cancel', command=self.cancel_settings).pack(side=RIGHT) def get_folder_path(self): self.download_path_input.delete(0.0, END) self.download_path_input.insert(INSERT, tkFileDialog.askdirectory(parent=self.settings_window, title='Choose a Folder')) def save_settings(self): self.config.set('main', 'download_path', self.download_path_input.get(0.0, END).replace('\n', '').strip()) self.config.set('main', 'auto_cb_grab', str(self.cb_var.get())) self.stop_cb_check = not self.cb_var.get() with open('settings.ini', 'w') as f: self.config.write(f) # rewrite from scratch; 'r+' could leave stale bytes if the config shrinks self.settings_window.destroy() def cancel_settings(self): self.settings_window.destroy() def select_all(self, e): self.url_input.tag_add(SEL, '1.0', END) self.url_input.mark_set(INSERT, '1.0') self.url_input.see(INSERT) return 'break' def disable_search(self): self.url_input.config(state=DISABLED) self.submit.config(state=DISABLED) self.url_input.unbind('<Return>') def enable_search(self): self.url_input.config(state=NORMAL) self.submit.config(state=NORMAL) self.url_input.bind('<Return>', self.search) def cancel_search(self): self.cancel.configure(text='Please wait...') self.run = False def get_results(self, input): try: results = audiojack.get_results(input)[:8] images = [] for i, result in enumerate(results): if self.run: image_data = Image.open(StringIO(results[i]['img'].decode('base64'))) image_data = image_data.resize((200, 200), Image.ANTIALIAS) images.append(ImageTk.PhotoImage(image=image_data)) else: break if self.run: self.q.put([results, images]) else: self.q.put(0) except (ExtractorError, DownloadError): # If the URL is invalid, self.q.put(-1) # put -1 into the queue to indicate that the URL is invalid. 
except NetworkError: self.q.put(-2) def search(self, event=None): self.run = True input = self.url_input.get(0.0, END).replace('\n', '').replace(' ', '').replace('\t', '') self.reset() self.q = Queue.Queue() t = Thread(target=self.get_results, args=[input]) t.daemon = True t.start() self.disable_search() self.search_progress = ttk.Progressbar(self.mainframe, length=200, mode='indeterminate') self.search_progress.pack() self.search_progress.start(20) self.cancel = ttk.Button(self.mainframe, text='Cancel', command=self.cancel_search) self.cancel.pack() self.add_results(input) def add_results(self, url): try: self.results_images = self.q.get(0) self.search_progress.pack_forget() self.search_progress.destroy() self.cancel.pack_forget() self.cancel.destroy() if self.results_images == 0: self.reset() elif self.results_images == -1: # If the URL is invalid self.error = ttk.Label(self.mainframe, text='Error: Invalid URL', font=self.font, foreground='#ff0000') self.error.pack() # Create an error message self.enable_search() # Enable the search option again elif self.results_images == -2: self.error = ttk.Label(self.mainframe, text='Error: Network error', font=self.font, foreground='#ff0000') self.error.pack() # Create an error message self.enable_search() # Enable the search option again else: self.enable_search() self.results = self.results_images[0] self.images = self.results_images[1] self.results_frame = ttk.Frame(self.mainframe) self.results_label = ttk.Label(self.mainframe, text='Results:', font=self.font) self.results_label.pack() for i, result in enumerate(self.results): text = '%s\n%s\n%s' % (result['title'], result['artist'], result['album']) self.result = ttk.Button(self.results_frame, text=text, image=self.images[i], compound=TOP, command=partial(self.download, result)) self.result.grid(column=i % 4, row=i / 4) self.results_frame.pack() self.create_custom_frame(url) except Queue.Empty: self.master.after(10, lambda: self.add_results(url)) def create_custom_frame(self, url): self.custom_frame = ttk.Frame(self.mainframe) self.custom_title = ttk.Label(self.custom_frame, text='Custom tags:') self.artist_label = ttk.Label(self.custom_frame, text='Artist: ') self.artist_input = Text(self.custom_frame, width=20, height=1, font=self.font) self.artist_input.bind('<Tab>', focus_next_window) self.title_label = ttk.Label(self.custom_frame, text='Title: ') self.title_input = Text(self.custom_frame, width=20, height=1, font=self.font) self.title_input.bind('<Tab>', focus_next_window) self.album_label = ttk.Label(self.custom_frame, text='Album: ') self.album_input = Text(self.custom_frame, width=20, height=1, font=self.font) self.album_input.bind('<Tab>', focus_next_window) self.cover_art = ttk.Button(self.custom_frame, text='Browse for cover art', command=self.cover_art_browse) self.cover_art_path = Entry(self.custom_frame, width=20, font=self.font) self.custom_submit = ttk.Button(self.custom_frame, text='Download using custom tags', command=partial(self.custom, url)) self.custom_title.grid(row=0, columnspan=2) self.artist_label.grid(column=0, row=1) self.artist_input.grid(column=1, row=1) self.title_label.grid(column=0, row=2) self.title_input.grid(column=1, row=2) self.album_label.grid(column=0, row=3) self.album_input.grid(column=1, row=3) self.cover_art.grid(column=0, row=4) self.cover_art_path.grid(column=1, row=4) self.custom_submit.grid(row=5, columnspan=2, sticky=EW, pady=10) self.custom_frame.pack(pady=10) def cover_art_browse(self): image = 
tkFileDialog.askopenfilename(initialdir=os.path.expanduser('~'), parent=root, filetypes=[('JPEG files', '*.jpg')])
        self.cover_art_path.delete(0, END)
        self.cover_art_path.insert(0, image)

    def get_file(self, entry, download_queue):
        try:
            file = audiojack.select(entry, self.config.get('main', 'download_path'))
            download_queue.put(file)
        except DownloadError as e:
            if 'ffprobe' in str(e) or 'ffmpeg' in str(e):  # Check whether the error is caused by ffmpeg not being installed.
                # Delete the temp file left behind by the failed conversion.
                file = '%s/Downloads/%s.temp' % (os.path.expanduser('~'), audiojack.title)
                try:
                    os.remove(file)
                except Exception:
                    pass
                download_queue.put(0)

    def download(self, entry):
        self.reset()
        self.download_queue = Queue.Queue()
        dl_t = Thread(target=self.get_file, args=[entry, self.download_queue])
        dl_t.daemon = True
        dl_t.start()
        self.disable_search()
        self.download_progress = ttk.Progressbar(self.mainframe, length=200, mode='indeterminate')
        self.download_progress.pack()
        self.download_progress.start(20)
        self.master.after(100, self.add_file)

    def add_file(self):
        try:
            result = self.download_queue.get(0)
            if result == 0:
                self.error = ttk.Label(self.mainframe, text='Error: ffmpeg is not installed.', font=self.font, foreground='#ff0000')
                self.error.pack()
            else:
                self.file = result.replace('/', '\\')
                text = 'Open %s' % self.file
                self.file_button = ttk.Button(self.mainframe, text=text, command=partial(self.open_file, self.file))
                self.file_button.pack()
                self.start_time_label = ttk.Label(self.mainframe, text='Start time: ')
                self.start_time_label.pack()
                self.start_time_input = Text(self.mainframe, width=20, height=1, font=self.font)
                self.start_time_input.bind('<Tab>', focus_next_window)
                self.start_time_input.pack()
                self.end_time_label = ttk.Label(self.mainframe, text='End time: ')
                self.end_time_label.pack()
                self.end_time_input = Text(self.mainframe, width=20, height=1, font=self.font)
                self.end_time_input.bind('<Tab>', focus_next_window)
                self.end_time_input.pack()
                self.cut_button = ttk.Button(self.mainframe, text='Cut File', command=self.cut)
                self.cut_button.pack()
            self.enable_search()
            self.download_progress.pack_forget()
            self.download_progress.destroy()
            self.results_label.pack_forget()
            self.results_label.destroy()
            self.results_frame.pack_forget()
            self.results_frame.destroy()
        except Queue.Empty:
            self.master.after(100, self.add_file)

    def custom(self, url):
        entry = {
            'artist': self.artist_input.get(0.0, END).replace('\n', ''),
            'title': self.title_input.get(0.0, END).replace('\n', ''),
            'album': self.album_input.get(0.0, END).replace('\n', ''),
            'url': url
        }
        try:
            with open(self.cover_art_path.get().replace('\n', ''), 'rb') as file:
                entry['img'] = file.read().encode('base64')
        except IOError:
            print('File not found')
        self.reset()
        file = audiojack.select(entry).replace('/', '\\')
        text = 'Open %s' % file
        self.file = ttk.Button(self.mainframe, text=text, command=partial(self.open_file, file))
        self.file.pack()

    def open_file(self, file):
        os.startfile(file)

    def cut(self):
        '''Cut the mp3 file.'''
        self.file_button.config(state=DISABLED)
        self.cut_button.config(state=DISABLED)
        start_time = self.start_time_input.get(0.0, END).replace('\n', '')
        end_time = self.end_time_input.get(0.0, END).replace('\n', '')
        self.master.update_idletasks()
        audiojack.cut_file(self.file, start_time, end_time)
        self.file_button.config(state=NORMAL)
        self.cut_button.config(state=NORMAL)

    def reset(self):
        self.url_input.delete(0.0, END)
        self.url_input.config(state=NORMAL)
        self.submit.config(state=NORMAL)
        # Tear down whichever result widgets currently exist; equivalent to
        # the original chain of per-widget try/except blocks.
        for name in ('error', 'cancel', 'results_label', 'results_frame',
                     'custom_frame', 'file', 'file_button',
                     'start_time_label', 'start_time_input',
                     'end_time_label', 'end_time_input', 'cut_button'):
            widget = getattr(self, name, None)
            try:
                widget.pack_forget()
                widget.destroy()
            except Exception:
                pass
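A minimal launch sketch for the class above. The module-level root is an assumption (cover_art_browse references one), and the settings.ini keys mirror what make_new_config writes:

# Expected settings.ini layout (created by make_new_config on first run):
#
#   [main]
#   download_path = C:\Users\example\Downloads
#   auto_cb_grab = True
#
# Assumes the wildcard Tkinter import style used by the class above.
if __name__ == '__main__':
    root = Tk()  # module-level root, as referenced by cover_art_browse
    root.title('AudioJack-GUI v0.4.0')
    app = AudioJackGUI(root)
    root.mainloop()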
    def config(self, config_file_name):
        """ Read data from config file to setup scan """
        self.__logger.info("Reading data from config-file:%s" % config_file_name)
        parser = SafeConfigParser()
        parser.read(config_file_name)

        # Read SCAN options first
        self.fel_mode = parser.get("SCAN", "fel_mode")
        self.laser = parser.getboolean("SCAN", "laser")

        # Use fel_mode to determine which scan parameters to use
        # -- SASE
        if self.fel_mode == "SASE":
            self.__logger.info("SASE Mode selected")
            self.SASE = fel_energy(parser.get("SASE", "ctrl_pv"),
                                   parser.get("SASE", "rbv_pv"),
                                   parser.getfloat("SASE", "rbv_done"))
            self.scan_start = parser.getfloat("SASE", "scan_start")
            self.scan_stop = parser.getfloat("SASE", "scan_stop")
            self.scan_steps = parser.getint("SASE", "scan_steps")

        # -- SEEDED
        if self.fel_mode == "SEEDED":
            self.__logger.info("SEEDED Mode selected")
            self.SEEDED = fel_energy(parser.get("SEEDED", "ctrl_pv"),
                                     parser.get("SEEDED", "rbv_pv"),
                                     parser.getfloat("SEEDED", "rbv_done"))
            self.scan_start = parser.getfloat("SEEDED", "scan_start")
            self.scan_stop = parser.getfloat("SEEDED", "scan_stop")
            self.scan_steps = parser.getint("SEEDED", "scan_steps")

        # -- SASE + SEEDED
        if "SASE" in self.fel_mode and "SEEDED" in self.fel_mode:
            self.__logger.info("SASE+SEEDED Mode selected")
            self.SASE = fel_energy(parser.get("SASE", "ctrl_pv"),
                                   parser.get("SASE", "rbv_pv"),
                                   parser.getfloat("SASE", "rbv_done"))
            self.SEEDED = fel_energy(parser.get("SEEDED", "ctrl_pv"),
                                     parser.get("SEEDED", "rbv_pv"),
                                     parser.getfloat("SEEDED", "rbv_done"))
            # - SASE config scan values define the scan range.
            #   The MONO calibration is used to convert from grating
            #   to energy value, which is used to set the SEEDED
            #   energy during the scan.
            self.scan_start = parser.getfloat("SASE", "scan_start")
            self.scan_stop = parser.getfloat("SASE", "scan_stop")
            self.scan_steps = parser.getint("SASE", "scan_steps")
            self.__logger.info("Grating range (mm): %f -- %f" % (self.scan_start, self.scan_stop))

            # Read Mono calibration
            # DIRECT CUT-N-PASTE FROM MARTIN BEYE'S seedingscanner3 SCRIPT
            y1 = parser.getfloat('MONO', 'LowGrating')
            y2 = parser.getfloat('MONO', 'MidGrating')
            y3 = parser.getfloat('MONO', 'HighGrating')
            x1 = parser.getfloat('MONO', 'LowMachine')
            x2 = parser.getfloat('MONO', 'MidMachine')
            x3 = parser.getfloat('MONO', 'HighMachine')
            denom = (x1 - x2) * (x1 - x3) * (x2 - x3)
            A = (x3 * (y2 - y1) + x2 * (y1 - y3) + x1 * (y3 - y2)) / denom
            B = (x3**2 * (y1 - y2) + x2**2 * (y3 - y1) + x1**2 * (y2 - y3)) / denom
            C = (x2 * x3 * (x2 - x3) * y1 + x3 * x1 * (x3 - x1) * y2 + x1 * x2 * (x1 - x2) * y3) / denom
            self.energy_to_grating = np.poly1d([A, B, C])

            # Convert grating values to energy
            energy_start = self.grating_to_energy(self.scan_start)
            energy_stop = self.grating_to_energy(self.scan_stop)
            self.__logger.info("Converted Energy range (eV): %f -- %f" % (energy_start, energy_stop))

        # Read Andor camera options
        self.andor_integration = parser.getfloat("ANDOR", "integration")
        self.andor_readout = parser.getfloat("ANDOR", "readout")
        self.andor_simmode = parser.getboolean("ANDOR", "simmode")
        self.andor_expdelay = parser.getfloat("ANDOR", "expdelay")
        self.andor_postdelay = parser.getfloat("ANDOR", "postdelay")
        self.andor_opendelay = parser.getfloat("ANDOR", "opendelay")
        self.andor_scandelay = parser.getfloat("ANDOR", "scandelay")

        # Read laser parameters if laser is TRUE
        if self.laser:
            self.laser_wait = parser.getfloat("LASER", "laser_wait")
            self.laser_shutter = lasershutter(parser.getboolean("LASER", "laser1"),
                                              parser.getboolean("LASER", "laser2"),
                                              parser.getboolean("LASER", "laser3"))

        # Get the PVs to send data to PSANA
        self.scan_start_pv = parser.get("ANALYSIS", "scan_start_pv")
        self.scan_stop_pv = parser.get("ANALYSIS", "scan_stop_pv")
        self.scan_steps_pv = parser.get("ANALYSIS", "scan_steps_pv")
        self.fel_mode_pv = parser.get("ANALYSIS", "fel_mode_pv")

        # Set PVs to send data to PSANA
        caput(self.scan_start_pv, self.scan_start)
        caput(self.scan_stop_pv, self.scan_stop)
        caput(self.scan_steps_pv, self.scan_steps)
        caput(self.fel_mode_pv, self.fel_mode)
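The A, B, C expressions in the MONO block are the closed-form coefficients of the unique quadratic through the three calibration points (x1, y1), (x2, y2), (x3, y3). A standalone sanity check, with invented numbers in place of real MONO calibration values:

# Verify that the A, B, C formulas reproduce the three calibration points.
# The x/y values below are illustrative, not real machine numbers.
import numpy as np

x1, x2, x3 = 840.0, 860.0, 880.0   # machine photon energies (eV), made up
y1, y2, y3 = 10.0, 14.0, 19.0      # grating positions (mm), made up

denom = (x1 - x2) * (x1 - x3) * (x2 - x3)
A = (x3 * (y2 - y1) + x2 * (y1 - y3) + x1 * (y3 - y2)) / denom
B = (x3**2 * (y1 - y2) + x2**2 * (y3 - y1) + x1**2 * (y2 - y3)) / denom
C = (x2 * x3 * (x2 - x3) * y1 + x3 * x1 * (x3 - x1) * y2
     + x1 * x2 * (x1 - x2) * y3) / denom

energy_to_grating = np.poly1d([A, B, C])
for x, y in ((x1, y1), (x2, y2), (x3, y3)):
    assert abs(energy_to_grating(x) - y) < 1e-9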
else:
    listContent = fileRead("file://" + args.config, sc)
    tmpStr = ''.join(str(e) + "\n" for e in listContent)
    stringIOContent = StringIO.StringIO(tmpStr)
    cf.readfp(stringIOContent)

train_regex = cf.get("data", "train")
test_regex = cf.get("data", "test")
try:
    data_path = cf.get("data", "data_path")
except:
    print ("WARNING: Unable to read data_path from config file. It could be caused by the environment variable settings, which are not supported when running in yarn mode")
prep_path = cf.get("data", "prep_path")
data_format = cf.get("data", "format")
parallel_flag = cf.getboolean("option", "parallel_train")
shards_number = cf.getint("option", "shards")
maxIteration = cf.getint("option", "iteration")
l_filename = cf.get("option", "l_filename") if cf.get("option", "l_filename") != '' else None
# Test d_filename itself here; the original mistakenly tested l_filename twice.
d_filename = cf.get("option", "d_filename") if cf.get("option", "d_filename") != '' else None
debug.debug.time_accounting_flag = cf.getboolean("option", "timer")
dump_freq = cf.getint("option", "dump_frequency")
debug.debug.log_feature_request_flag = cf.getboolean("option", "log-feature-request")
interactValue = cf.getboolean("option", "interactive")
learnerValue = cf.get("core", "learner")
fgenValue = cf.get("core", "feature_generator")
parserValue = cf.get("core", "parser")

if True:
    # Process other arguments
    if args.hadoop:
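The l_filename/d_filename lines above repeat an "empty string means None" convention; a small sketch of a reusable helper (name and usage hypothetical):

# Helper sketch for the "optional option" pattern, so empty values fall
# back to None without repeating the ternary for every key:
def get_or_none(cf, section, option):
    """Return the option value, or None if it is missing or empty."""
    try:
        value = cf.get(section, option)
    except Exception:
        return None
    return value if value != '' else None

# l_filename = get_or_none(cf, 'option', 'l_filename')
# d_filename = get_or_none(cf, 'option', 'd_filename')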
class parameters(object): def __init__(self, parfile='Parfiles/default.par', mode='forward_model', mpi=True, log=True): # todo might switch mpi=False as default?? ''' a parameter file is parsed and initial parameter values are set. to add a new parameter edit this file and the input .par file. ''' if mpi == False: MPIimport = False MPIrank = 0 MPIsize = 0 else: try: from mpi4py import MPI MPIimport = True except ImportError: MPIimport = False pass # MPI support if MPIimport: MPIrank = MPI.COMM_WORLD.Get_rank() MPIsize = MPI.COMM_WORLD.Get_size() else: MPIrank = 0 MPIsize = 0 #config file parser if parfile: self.parser = SafeConfigParser() try: self.parser.readfp(open(parfile, 'rb')) # python 2 except: self.parser.read_file(open(parfile, 'rt', encoding='latin1')) # python 3 self.parfile = parfile self.default_parser = SafeConfigParser() try: self.default_parser.readfp(open('Parfiles/default.par', 'rb')) # python 2 except: self.default_parser.read_file( open('Parfiles/default.par', 'rt', encoding='latin1')) # python 3 self.default_parser.sections() self.verbose = self.getpar('General', 'verbose', 'bool') self.verbose_all_threads = self.getpar('General', 'verbose_all_threads', 'bool') if len(logging.getLogger().handlers ) == 0: # be sure to load only one logging handler # configure logging instance if MPIrank == 0: if not os.path.isdir(self.getpar('Output', 'path')): logging.info('Create folder Output') os.makedirs(self.getpar('Output', 'path')) logging.basicConfig(filename=os.path.join( self.getpar('Output', 'path'), 'taurex.log'), level=logging.DEBUG) if (MPIrank == 0 and not self.verbose_all_threads) or self.verbose_all_threads: # define a Handler which writes INFO messages or higher to the sys.stderr self.console = logging.StreamHandler() if MPIsize > 1: formatter = logging.Formatter( '%(asctime)s - Thread ' + str(MPIrank) + ' - %(levelname)s - %(message)s') else: formatter = logging.Formatter( '%(asctime)s - %(levelname)s - %(message)s') self.console.setFormatter(formatter) logging.getLogger().addHandler(self.console) logging.info('Log started. 
Verbose for all threads: %s' % self.verbose_all_threads)

        if not log:
            logging.getLogger().disabled = True
        else:
            logging.getLogger().disabled = False

        try:
            self.version = subprocess.check_output(["git", "describe"])[:-1]
            logging.info('Running TauREx from git repository (%s)' % self.version)
        except:
            pass

        logging.info('Initialise parameters object')

        if mode in ['forward_model', 'retrieval']:
            self.mode = mode
            logging.info('TauREx is running in %s mode' % self.mode)
        else:
            logging.info('Mode %s is not recognized' % mode)

        # list of all molecules for which we have cross sections
        self.all_absorbing_gases = ['H2O', 'HCN', 'CH4', 'CO2', 'CO', 'NH3', 'C2H2']
        # list of all inactive gases we take care of
        self.all_inactive_gases = ['He', 'H2', 'N2']

        # section General
        self.gen_manual_waverange = self.getpar('General', 'manual_waverange', 'bool')
        self.gen_wavemin = self.getpar('General', 'wavemin', 'float')
        self.gen_wavemax = self.getpar('General', 'wavemax', 'float')
        self.gen_type = self.getpar('General', 'type')
        self.gen_ace = self.getpar('General', 'ace', 'bool')
        self.gen_compile_cpp = self.getpar('General', 'compile_cpp', 'bool')
        self.gen_run_gui = False

        # section Input
        self.in_spectrum_file = self.getpar('Input', 'spectrum_file')
        #self.in_spectrum_micron = self.getpar('Input', 'spectrum_micron', 'bool')
        self.in_spectrum_db = self.getpar('Input', 'spectrum_db')
        #self.in_use_ATMfile = self.getpar('Input','use_ATMfile', 'bool')
        #self.in_atm_file = self.getpar('Input','atm_file')
        self.in_opacity_method = self.getpar('Input', 'opacity_method')
        self.in_xsec_path = self.getpar('Input', 'xsec_path')
        self.in_ktab_path = self.getpar('Input', 'ktab_path')
        self.in_custom_temp_range = self.getpar('Input', 'custom_temp_range', 'list-float')
        self.in_cia_path = self.getpar('Input', 'cia_path')
        self.in_mie_path = self.getpar('Input', 'mie_path')
        self.in_star_path = self.getpar('Input', 'star_path')

        # section Output
        self.out_path = self.getpar('Output', 'path')
        #self.out_save_plots = self.getpar('Output','save_plots', 'bool')
        self.out_sigma_spectrum = self.getpar('Output', 'sigma_spectrum', 'bool')
        self.out_sigma_spectrum_frac = self.getpar('Output', 'sigma_spectrum_frac', 'float')

        # section Star
        self.star_radius = self.getpar('Star', 'radius', 'float') * RSOL
        self.star_temp = self.getpar('Star', 'temp', 'float')
        self.star_use_blackbody = self.getpar('Star', 'use_blackbody', 'bool')

        # section Planet
        self.planet_class = self.getpar('Planet', 'class')
        self.planet_radius = self.getpar('Planet', 'radius', 'float') * RJUP
        self.planet_mass = self.getpar('Planet', 'mass', 'float') * MJUP

        # section Atmosphere
        self.atm_nlayers = self.getpar('Atmosphere', 'nlayers', 'int')
        self.atm_max_pres = self.getpar('Atmosphere', 'max_pressure', 'float')
        self.atm_min_pres = self.getpar('Atmosphere', 'min_pressure', 'float')
        self.atm_tp_type = self.getpar('Atmosphere', 'tp_type')
        self.atm_tp_file = self.getpar('Atmosphere', 'tp_file')
        self.atm_tp_iso_temp = self.getpar('Atmosphere', 'tp_iso_temp', 'float')
        self.atm_tp_guillot_T_irr = self.getpar('Atmosphere', 'tp_guillot_T_irr', 'float')
        self.atm_tp_guillot_kappa_ir = self.getpar('Atmosphere', 'tp_guillot_kappa_ir', 'float')
        self.atm_tp_guillot_kappa_v1 = self.getpar('Atmosphere', 'tp_guillot_kappa_v1', 'float')
        self.atm_tp_guillot_kappa_v2 = self.getpar('Atmosphere', 'tp_guillot_kappa_v2', 'float')
        self.atm_tp_guillot_alpha = self.getpar('Atmosphere', 'tp_guillot_alpha', 'float')
        self.atm_tp_2point_T_surf = self.getpar('Atmosphere', 'tp_2point_T_surf', 'float')
self.atm_tp_2point_T_trop_diff = self.getpar('Atmosphere', 'tp_2point_T_trop_diff', 'float') self.atm_tp_2point_P_trop = self.getpar('Atmosphere', 'tp_2point_P_trop', 'float') self.atm_tp_Npoint_T_list = self.getpar('Atmosphere', 'tp_Npoint_T_list', 'list-float') self.atm_tp_Npoint_P_list = self.getpar('Atmosphere', 'tp_Npoint_P_list', 'list-float') self.atm_tp_Npoint_smooth = self.getpar('Atmosphere', 'tp_Npoint_smooth', 'int') self.atm_tp_corr_length = self.getpar('Atmosphere', 'tp_corr_length', 'float') self.atm_active_gases = [ gas.upper() for gas in self.getpar('Atmosphere', 'active_gases', 'list-str') ] if self.atm_active_gases[0] == 'FILE': self.atm_active_gases = 'file' self.atm_active_gases_mixratios = self.getpar( 'Atmosphere', 'active_gases_mixratios', 'list-float') self.atm_active_gases_file = self.getpar('Atmosphere', 'active_gases_file') self.atm_inactive_gases = ['H2', 'HE', 'N2'] self.atm_N2_mixratio = self.getpar('Atmosphere', 'N2_mixratio', 'float') self.atm_He_H2_ratio = self.getpar('Atmosphere', 'He_H2_ratio', 'float') #self.atm_mu = self.getpar('Atmosphere', 'mu', 'float')*AMU #self.atm_couple_mu = self.getpar('Atmosphere', 'couple_mu', 'bool') self.atm_rayleigh = self.getpar('Atmosphere', 'rayleigh', 'bool') self.atm_mie = self.getpar('Atmosphere', 'mie', 'bool') self.atm_mie_type = self.getpar('Atmosphere', 'mie_type') self.atm_mie_dist_type = self.getpar('Atmosphere', 'mie_dist_type') self.atm_mie_flat = self.getpar('Atmosphere', 'mie_flat', 'bool') # if self.atm_mie: # self.atm_rayleigh = False #Mie replaces Rayleigh self.atm_mie_r = self.getpar('Atmosphere', 'mie_r', 'float') self.atm_mie_q = self.getpar('Atmosphere', 'mie_q', 'float') self.atm_mie_f = self.getpar('Atmosphere', 'mie_f', 'float') self.atm_mie_topP = self.getpar('Atmosphere', 'mie_topP', 'float') self.atm_mie_bottomP = self.getpar('Atmosphere', 'mie_bottomP', 'float') self.atm_cia = self.getpar('Atmosphere', 'cia', 'bool') self.atm_cia_pairs = [ pair.upper() for pair in self.getpar('Atmosphere', 'cia_pairs', 'list-str') ] self.atm_clouds = self.getpar('Atmosphere', 'clouds', 'bool') self.atm_clouds_pressure = self.getpar('Atmosphere', 'clouds_pressure', 'float') self.atm_ace_metallicity = self.getpar('Atmosphere', 'ace_metallicity', 'float') self.atm_ace_co = self.getpar('Atmosphere', 'ace_co', 'float') # section Venot #self.ven_load = self.getpar('Venot', 'load', 'bool') #self.ven_TP_profile_path = self.getpar('Venot', 'TP_profile_path') #self.ven_mol_profile_path = self.getpar('Venot', 'mol_profile_path') #self.ven_exclude_mol = [mol.upper() for mol in self.getpar('Venot','exclude_mol', 'list-str')] # Section Fit #self.fit_transmission = self.getpar('Fitting','transmission', 'bool') #self.fit_emission = self.getpar('Fitting', 'emission', 'bool') self.fit_emission_stage2 = self.getpar('Fitting', 'emission_stage2', 'bool') # misc #self.fit_couple_mu = self.getpar('Fitting','couple_mu', 'bool') #self.fit_inactive_mu_rescale = self.getpar('Fitting','inactive_mu_rescale', 'bool') self.fit_mixratio_log = self.getpar('Fitting', 'mixratio_log', 'bool') #self.fit_clr_trans = self.getpar('Fitting','clr_trans', 'bool') # fit / fix parameters self.fit_fit_active_gases = self.getpar('Fitting', 'fit_active_gases', 'bool') self.fit_fit_N2_mixratio = self.getpar('Fitting', 'fit_N2_mixratio', 'bool') self.fit_fit_He_H2_ratio = self.getpar('Fitting', 'fit_He_H2_ratio', 'bool') #self.fit_fit_inactive = self.getpar('Fitting', 'fit_inactive', 'bool') self.fit_fit_temp = self.getpar('Fitting', 'fit_temp', 'bool') 
#self.fit_fit_mu = self.getpar('Fitting', 'fit_mu', 'bool') self.fit_fit_radius = self.getpar('Fitting', 'fit_radius', 'bool') #self.fit_fit_P0 = self.getpar('Fitting', 'fit_P0', 'bool') self.fit_fit_clouds_pressure = self.getpar('Fitting', 'fit_clouds_pressure', 'bool') self.fit_fit_ace_metallicity = self.getpar('Fitting', 'fit_ace_metallicity', 'bool') self.fit_fit_ace_co = self.getpar('Fitting', 'fit_ace_co', 'bool') # prior bounds self.fit_mixratio_bounds = self.getpar('Fitting', 'mixratio_bounds', 'list-float') #self.fit_X_inactive_bounds = self.getpar('Fitting', 'X_inactive_bounds', 'list-float') #self.fit_clr_bounds = self.getpar('Fitting', 'clr_bounds', 'list-float') self.fit_He_H2_ratio_bounds = self.getpar('Fitting', 'He_H2_ratio_bounds', 'list-float') #self.fit_mu_bounds = self.getpar('Fitting', 'mu_bounds', 'list-float') self.fit_radius_bounds = self.getpar('Fitting', 'radius_bounds', 'list-float') self.fit_radius_bounds_factor = self.getpar('Fitting', 'radius_bounds_factor', 'float') #self.fit_P0_bounds = self.getpar('Fitting', 'P0_bounds', 'list-float') self.fit_clouds_pressure_bounds = self.getpar( 'Fitting', 'clouds_pressure_bounds', 'list-float') self.fit_fit_mie = self.getpar('Fitting', 'fit_mie', 'bool') self.fit_fit_mie_composition = self.getpar('Fitting', 'fit_mie_composition', 'bool') self.fit_fit_mie_radius = self.getpar('Fitting', 'fit_mie_radius', 'bool') self.fit_mie_r_bounds = self.getpar('Fitting', 'mie_r_bounds', 'list-float') self.fit_mie_q_bounds = self.getpar('Fitting', 'mie_q_bounds', 'list-float') self.fit_mie_f_bounds = self.getpar('Fitting', 'mie_f_bounds', 'list-float') self.fit_fit_mie_cloud_topP = self.getpar('Fitting', 'fit_mie_Ptop', 'bool') self.fit_mie_topP_bounds = self.getpar('Fitting', 'mie_ptop_bounds', 'list-float') self.fit_fit_mie_cloud_bottomP = self.getpar('Fitting', 'fit_mie_Pbottom', 'bool') self.fit_mie_bottomP_bounds = self.getpar('Fitting', 'mie_pbottom_bounds', 'list-float') self.fit_ace_metallicity_bounds = self.getpar( 'Fitting', 'ace_metallicity_bounds', 'list-float') self.fit_ace_co_bounds = self.getpar('Fitting', 'ace_co_bounds', 'list-float') self.fit_tp_iso_bounds = self.getpar('Fitting', 'tp_iso_bounds', 'list-float') self.fit_tp_guillot_T_irr_bounds = self.getpar( 'Fitting', 'tp_guillot_T_irr_bounds', 'list-float') self.fit_tp_guillot_kappa_ir_bounds = self.getpar( 'Fitting', 'tp_guillot_kappa_ir_bounds', 'list-float') self.fit_tp_guillot_kappa_v1_bounds = self.getpar( 'Fitting', 'tp_guillot_kappa_v1_bounds', 'list-float') self.fit_tp_guillot_kappa_v2_bounds = self.getpar( 'Fitting', 'tp_guillot_kappa_v2_bounds', 'list-float') self.fit_tp_guillot_alpha_bounds = self.getpar( 'Fitting', 'tp_guillot_alpha_bounds', 'list-float') self.fit_hybrid_alpha_bounds = self.getpar('Fitting', 'hybrid_alpha_bounds', 'list-float') # section Downhill self.downhill_run = self.getpar('Downhill', 'run', 'bool') self.downhill_type = self.getpar('Downhill', 'type') self.downhill_out_filename = self.getpar('Downhill', 'out_filename') # section MCMC self.mcmc_run = self.getpar('MCMC', 'run', 'bool') self.mcmc_update_std = self.getpar('MCMC', 'update_std', 'bool') self.mcmc_iter = self.getpar('MCMC', 'iter', 'float') self.mcmc_burn = self.getpar('MCMC', 'burn', 'float') self.mcmc_thin = self.getpar('MCMC', 'thin', 'float') self.mcmc_verbose = self.getpar('MCMC', 'verbose', 'bool') self.mcmc_progressbar = self.getpar('MCMC', 'progressbar', 'bool') self.mcmc_out_filename = self.getpar('MCMC', 'out_filename') # section MultiNest self.nest_run = 
self.getpar('MultiNest', 'run', 'bool')
        self.nest_resume = self.getpar('MultiNest', 'resume', 'bool')
        self.nest_verbose = self.getpar('MultiNest', 'verbose', 'bool')
        self.nest_path = self.getpar('MultiNest', 'nest_path')  # @todo not used?
        self.nest_samp_eff = self.getpar('MultiNest', 'sampling_eff')
        self.nest_nlive = self.getpar('MultiNest', 'n_live_points', 'int')
        self.nest_max_iter = self.getpar('MultiNest', 'max_iter', 'int')
        self.nest_multimodes = self.getpar('MultiNest', 'multimodes')
        self.nest_max_modes = self.getpar('MultiNest', 'max_modes', 'int')
        self.nest_const_eff = self.getpar('MultiNest', 'const_eff', 'bool')
        self.nest_ev_tol = self.getpar('MultiNest', 'evidence_tolerance', 'float')
        self.nest_mode_tol = self.getpar('MultiNest', 'mode_tolerance', 'float')
        self.nest_imp_sampling = self.getpar('MultiNest', 'imp_sampling', 'bool')
        self.nest_out_filename = self.getpar('MultiNest', 'out_filename')

        # section PolyChord
        self.nest_poly_run = self.getpar('PolyChord', 'run', 'bool')
        self.nest_poly_resume = self.getpar('PolyChord', 'resume', 'bool')
        self.nest_poly_path = self.getpar('PolyChord', 'path')
        self.nest_poly_file_root = self.getpar('PolyChord', 'file_root')
        self.nest_poly_precision = self.getpar('PolyChord', 'precision', 'float')
        self.nest_poly_clustering = self.getpar('PolyChord', 'clustering', 'bool')
        self.nest_poly_out_filename = self.getpar('PolyChord', 'out_filename')

        # determining upper and lower bounds
        if self.atm_mie_topP == -1:
            self.atm_mie_topP = self.atm_min_pres
        if self.atm_mie_bottomP == -1:
            self.atm_mie_bottomP = self.atm_max_pres
        if self.fit_mie_topP_bounds[0] == -1:
            self.fit_mie_topP_bounds[0] = self.atm_min_pres
        if self.fit_mie_topP_bounds[1] == -1:
            self.fit_mie_topP_bounds[1] = self.atm_max_pres
        if self.fit_mie_bottomP_bounds[0] == -1:
            self.fit_mie_bottomP_bounds[0] = self.atm_min_pres
        if self.fit_mie_bottomP_bounds[1] == -1:
            self.fit_mie_bottomP_bounds[1] = self.atm_max_pres

    def getpar(self, sec, par, type=None):
        # Get a parameter from the user-defined parser. If the parameter is not
        # found there, load the default from self.default_parser (defined in init).
        try:
            if type is None:
                try:
                    return self.parser.get(sec, par)
                except:
                    return self.default_parser.get(sec, par)
            elif type == 'float':
                try:
                    return self.parser.getfloat(sec, par)
                except:
                    return self.default_parser.getfloat(sec, par)
            elif type == 'bool':
                try:
                    return self.parser.getboolean(sec, par)
                except:
                    return self.default_parser.getboolean(sec, par)
            elif type == 'int':
                try:
                    return self.parser.getint(sec, par)
                except:
                    return self.default_parser.getint(sec, par)
            elif type == 'list-str':
                try:
                    l = self.parser.get(sec, par).split(',')
                    return [str(m).strip() for m in l]
                except:
                    l = self.default_parser.get(sec, par).split(',')
                    return [str(m).strip() for m in l]
            elif type == 'list-float':
                try:
                    l = self.parser.get(sec, par).split(',')
                    return [float(m) for m in l]
                except:
                    l = self.default_parser.get(sec, par).split(',')
                    return [float(m) for m in l]
            elif type == 'list-int':
                try:
                    l = self.parser.get(sec, par).split(',')
                    return [int(m) for m in l]
                except:
                    l = self.default_parser.get(sec, par).split(',')
                    return [int(m) for m in l]
            else:
                # The original string was missing the '%' operator before the
                # argument tuple, which raised a TypeError instead of logging.
                logging.error('Cannot set parameter %s in section %s. Parameter type %s not recognized. Set to None' % (par, sec, type))
                return None
        except:
            logging.error('Cannot set parameter %s in section %s. Set to None' % (par, sec))
            return None

    def params_to_dict(self):
        # convert param variables to dictionary
        pr = {}
        for name in dir(self):
            value = getattr(self, name)
            if not name.startswith('__') and not inspect.ismethod(value) and \
                    name != 'parser' and name != 'default_parser' and name != 'console':
                pr[name] = value
        return pr
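The user-then-default lookup in getpar() can be exercised standalone. A minimal Python 2 sketch with in-memory configs (section and values illustrative):

from ConfigParser import SafeConfigParser
from StringIO import StringIO

default = SafeConfigParser()
default.readfp(StringIO('[Atmosphere]\nnlayers = 100\nmax_pressure = 1e6\n'))
user = SafeConfigParser()
user.readfp(StringIO('[Atmosphere]\nnlayers = 50\n'))

def getpar(sec, par):
    # Same fallback idea as parameters.getpar, reduced to one type.
    try:
        return user.getfloat(sec, par)
    except Exception:
        return default.getfloat(sec, par)

assert getpar('Atmosphere', 'nlayers') == 50.0      # user value wins
assert getpar('Atmosphere', 'max_pressure') == 1e6  # falls back to default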
expected_remote_fs = '' try: rsyncpath = '--rsync-path="' + config.get( path, 'rsync-path').strip('"') + '"' except: rsyncpath = '' try: if os.path.isabs(config.get(path, 'check-file').strip('"')): checkfile = config.get(path, 'check-file').strip('"') else: checkfile = path + '/' + config.get( path, 'check-file').strip('"') except: checkfile = path try: if config.getboolean(path, 'delete'): delete = '--delete' else: delete = '' except: delete = '' try: exclude_config_get = config.get(path, 'exclude') try: exclude = ' ' for item in json.loads(exclude_config_get): exclude += '--exclude ' + item + ' ' except Exception, e: logging.error("error reading excludes for " + path + " - ABORTING - " + str(e)) error_count = error_count + 1
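json.loads above implies the exclude option must hold a JSON list. An illustrative per-path stanza plus a standalone check of the flag-building loop (section name and patterns invented):

# Expected ini stanza, one section per synced path (values made up):
#
#   [/srv/data]
#   rsync-path = "/usr/bin/rsync"
#   check-file = .backup-ok
#   delete = true
#   exclude = ["*.tmp", "cache/"]
import json

exclude = ' '
for item in json.loads('["*.tmp", "cache/"]'):
    exclude += '--exclude ' + item + ' '
assert exclude == ' --exclude *.tmp --exclude cache/ '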
def cli():
    parser = argparse.ArgumentParser(description='Check HTTPs rules for validity')
    # The original help text ('an integer for the accumulator') was a leftover
    # from the argparse tutorial; this argument is the config file path.
    parser.add_argument('checker_config', help='path to checker config ini file')
    parser.add_argument('rule_files', nargs="*", default=[], help="Specific XML rule files")
    parser.add_argument('--json_file', default=None, help='write results in json file')
    args = parser.parse_args()

    config = SafeConfigParser()
    config.read(args.checker_config)

    logfile = config.get("log", "logfile")
    loglevel = convertLoglevel(config.get("log", "loglevel"))
    if logfile == "-":
        logging.basicConfig(stream=sys.stderr, level=loglevel,
                            format="%(levelname)s %(message)s")
    else:
        logging.basicConfig(filename=logfile, level=loglevel,
                            format="%(asctime)s %(levelname)s %(message)s [%(pathname)s:%(lineno)d]")

    autoDisable = False
    if config.has_option("rulesets", "auto_disable"):
        autoDisable = config.getboolean("rulesets", "auto_disable")
    # Test rules even if they have default_off=...
    includeDefaultOff = False
    if config.has_option("rulesets", "include_default_off"):
        includeDefaultOff = config.getboolean("rulesets", "include_default_off")
    ruledir = config.get("rulesets", "rulesdir")
    checkCoverage = False
    if config.has_option("rulesets", "check_coverage"):
        checkCoverage = config.getboolean("rulesets", "check_coverage")
    checkNonmatchGroups = False
    if config.has_option("rulesets", "check_nonmatch_groups"):
        checkNonmatchGroups = config.getboolean("rulesets", "check_nonmatch_groups")
    checkTestFormatting = False
    if config.has_option("rulesets", "check_test_formatting"):
        checkTestFormatting = config.getboolean("rulesets", "check_test_formatting")
    certdir = config.get("certificates", "basedir")
    if config.has_option("rulesets", "skiplist"):
        skiplist = config.get("rulesets", "skiplist")
        with open(skiplist) as f:
            for line in f:
                fileHash = line.split(" ")[0]
                skipdict[binascii.unhexlify(fileHash)] = 1

    threadCount = config.getint("http", "threads")
    httpEnabled = True
    if config.has_option("http", "enabled"):
        httpEnabled = config.getboolean("http", "enabled")

    # get all platform dirs, make sure "default" is among them
    certdirFiles = glob.glob(os.path.join(certdir, "*"))
    havePlatforms = set([os.path.basename(fname) for fname in certdirFiles if os.path.isdir(fname)])
    logging.debug("Loaded certificate platforms: %s", ",".join(havePlatforms))
    if "default" not in havePlatforms:
        raise RuntimeError("Platform 'default' is missing from certificate directories")

    metricName = config.get("thresholds", "metric")
    thresholdDistance = config.getfloat("thresholds", "max_distance")
    metricClass = getMetricClass(metricName)
    metric = metricClass()

    # Debugging options, graphviz dump
    dumpGraphvizTrie = False
    if config.has_option("debug", "dump_graphviz_trie"):
        dumpGraphvizTrie = config.getboolean("debug", "dump_graphviz_trie")
    if dumpGraphvizTrie:
        graphvizFile = config.get("debug", "graphviz_file")
        exitAfterDump = config.getboolean("debug", "exit_after_dump")

    if args.rule_files:
        xmlFnames = args.rule_files
    else:
        xmlFnames = glob.glob(os.path.join(ruledir, "*.xml"))
    trie = RuleTrie()

    rulesets = []
    coverageProblemsExist = False
    nonmatchGroupProblemsExist = False
    testFormattingProblemsExist = False
    for xmlFname in xmlFnames:
        logging.debug("Parsing %s", xmlFname)
        if skipFile(xmlFname):
            logging.debug("Skipping rule file '%s', matches skiplist." % xmlFname)
            continue

        try:
            ruleset = Ruleset(etree.parse(file(xmlFname)).getroot(), xmlFname)
        except Exception, e:
            logging.error("Exception parsing %s: %s" % (xmlFname, e))
            continue  # skip this file; without this, a stale or undefined ruleset would be used below

        if ruleset.defaultOff and not includeDefaultOff:
            logging.debug("Skipping rule '%s', reason: %s", ruleset.name, ruleset.defaultOff)
            continue
        # Check whether ruleset coverage by tests was sufficient.
        if checkCoverage:
            logging.debug("Checking coverage for '%s'." % ruleset.name)
            problems = ruleset.getCoverageProblems()
            for problem in problems:
                coverageProblemsExist = True
                logging.error(problem)
        if checkNonmatchGroups:
            logging.debug("Checking non-match groups for '%s'." % ruleset.name)
            problems = ruleset.getNonmatchGroupProblems()
            for problem in problems:
                nonmatchGroupProblemsExist = True
                logging.error(problem)
        if checkTestFormatting:
            logging.debug("Checking test formatting for '%s'." % ruleset.name)
            problems = ruleset.getTestFormattingProblems()
            for problem in problems:
                testFormattingProblemsExist = True
                logging.error(problem)
        trie.addRuleset(ruleset)
        rulesets.append(ruleset)
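For reference, an illustrative checker_config covering the options cli() reads; every value here, including the metric name, is invented:

[log]
logfile = -
loglevel = INFO

[rulesets]
rulesdir = rules
auto_disable = false
include_default_off = false
check_coverage = true
check_nonmatch_groups = true
check_test_formatting = true

[certificates]
basedir = certs

[thresholds]
metric = markov
max_distance = 4.0

[http]
threads = 10
enabled = true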
print "Provide config file location" sys.exit(1) settings = SafeConfigParser() settings.read(configfile) ### Atlas Environment variables ATLAS_PORT = settings.get('atlas', 'port') ATLAS_DOMAIN = settings.get('atlas', 'host') USERNAME = settings.get('atlas', 'username') PASSWORD = settings.get('atlas', 'password') CLUSTERNAME = settings.get('atlas', 'clustername') hdfsnameservice = settings.get('atlas', 'hdfsnameservice') dynamic = settings.getboolean('properties', 'createAttributeDynamically') jsonfile = settings.get('properties', 'jsonFile') #IGC IGC = settings.get('IGC', 'IGC') IGCUser = settings.get('IGC', 'IGCUser') IGCPassword = settings.get('IGC', 'IGCPassword') IGCSync = settings.getboolean('IGC', 'IGCSync') IGCAttribute = settings.get('IGC', 'IGCAttribute') IGCRootGlossary = settings.get('IGC', 'IGCRootGlossary') #CHECK IF ATLAS IS UP try: atlascheck() except: print "Cannot connect to Atlas"
','.join(config_parser.sections())) else: raise MissingApplicationConfig( 'Need to put a config ini file in your environment variables') # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = config_parser.get(environment, 'django_secret', raw=True) # SECURITY WARNING: don't run with debug turned on in production! DEBUG = config_parser.getboolean(environment, 'debug') ALLOWED_HOSTS = [ config_parser.get(environment, 'instance_ip'), config_parser.get(environment, 'domain') ] # Application definition INSTALLED_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles',