def main(): print "reading configuration" logging.basicConfig() config = RawConfigParser() config.read(['SystemInfoJabberBot.cfg',expanduser('~/.config/SystemInfoJabberBot.cfg')]) username = config.get('systembot','username') password = config.get('systembot','password') auth_users_raw= config.get('systembot','auth_users') auth_users=auth_users_raw.replace(' ','').split(',') print "set config" bot = SystemInfoJabberBot(username,password,auth_users) # Transmission config if config.has_section('transmissionrpc'): host = config.get('transmissionrpc','host') port = config.getint('transmissionrpc','port') try: user = config.get('transmissionrpc','user') psw = config.get('transmissionrpc','password') bot.setTransmissionConfig(host,port=port,user=user,psw=psw) except NoOptionError: bot.setTransmissionConfig(host,port=port) if config.has_section('logs'): log_files=config.items('logs') bot.setLogFiles( dict(log_files) ) print "start serve" bot.serve_forever() try: bot.quit() except Exception: pass
class Config(): """ Load and access the carbonate configuration. """ def __init__(self, config_file): self.config_file = config_file self.config = RawConfigParser() self.config.read(config_file) def clusters(self): """Return the clusters defined in the config file.""" return self.config.sections() def destinations(self, cluster='main'): """Return a list of destinations for a cluster.""" if not self.config.has_section(cluster): raise SystemExit("Cluster '%s' not defined in %s" % (cluster, self.config_file)) destinations = self.config.get(cluster, 'destinations') return destinations.replace(' ', '').split(',') def replication_factor(self, cluster='main'): """Return the replication factor for a cluster as an integer.""" if not self.config.has_section(cluster): raise SystemExit("Cluster '%s' not defined in %s" % (cluster, self.config_file)) return int(self.config.get(cluster, 'replication_factor')) def ssh_user(self, cluster='main'): """Return the ssh user for a cluster or current user if undefined.""" if not self.config.has_section(cluster): raise SystemExit("Cluster '%s' not defined in %s" % (cluster, self.config_file)) try: return self.config.get(cluster, 'ssh_user') except NoOptionError: return pwd.getpwuid(os.getuid()).pw_name def whisper_lock_writes(self, cluster='main'): """Lock whisper files during carbon-sync.""" if not self.config.has_section(cluster): raise SystemExit("Cluster '%s' not defined in %s" % (cluster, self.config_file)) try: return bool(self.config.get(cluster, 'whisper_lock_writes')) except NoOptionError: return False def hashing_type(self, cluster='main'): """Hashing type of cluster.""" if not self.config.has_section(cluster): raise SystemExit("Cluster '%s' not defined in %s" % (cluster, self.config_file)) hashing_type = 'carbon_ch' try: return self.config.get(cluster, 'hashing_type') except NoOptionError: return hashing_type
def setUp(self): self.driver = webdriver.Firefox() self.driver.implicitly_wait(30) self.verificationErrors = [] self.accept_next_alert = True parser = RawConfigParser() config_file = join(BASE_DIR, '.private/.config.txt') self.name = 'testcluster' parser.read(config_file) try: self.token = parser.get('cloud \"~okeanos\"', 'token') self.auth_url = parser.get('cloud \"~okeanos\"', 'url') self.base_url = parser.get('deploy', 'url') self.project_name = parser.get('project', 'name') auth = check_credentials(self.token) try: list_of_projects = auth.get_projects(state='active') except Exception: self.assertTrue(False,'Could not get list of projects') for project in list_of_projects: if project['name'] == self.project_name: self.project_id = project['id'] except NoSectionError: self.token = 'INVALID_TOKEN' self.auth_url = "INVALID_AUTH_URL" self.base_url = "INVALID_APP_URL" self.project_name = "INVALID_PROJECT_NAME" print 'Current authentication details are kept off source control. ' \ '\nUpdate your .config.txt file in <projectroot>/.private/'
def login(self, config_file=None): """Login wikipedia using credential configuration file. If config_file argument is filled to the method, it override the one of the object (and save it). Other wise it uses the config_file attribute. Args: config_file (str, optional): Path to the credential configuration file. Raises: ValueError: when neither config_file is given to the method or to the objects. """ if config_file is not None: self.config_file = config_file elif self.config_file is None: raise ValueError('Trying to login without config_file') configparser = RawConfigParser() configparser.read(self.config_file) self.site.login(configparser.get('login', 'user'), configparser.get('login', 'password')) self.__is_logged__ = True LOG.info("Logged in as '%s'", configparser.get('login', 'user'))
class HostTest(unittest.TestCase):
    """ Test various host configurations """

    def setUp(self):
        optparser = BaseOptions()
        optparser.parseOptions(['dummyfile.xml',
                                '--debug=%s' % logging._levelNames[log.level].lower()])
        self.defaults = RawConfigParser()
        configfiles = self.defaults.read(TESTCONF)
        # BUG FIX: validate that the file was actually read *before* poking
        # at its contents; the old order raised NoSectionError on a missing
        # file, making the ValueError below unreachable.
        if len(configfiles) == 0:
            raise ValueError("Cannot load configuration file: %s" % optparser.options.configfile)
        # Sanity probe: raises if the mandatory option is absent.
        self.defaults.get('global', 'dns_domain_name')
        self.sitea = Site()
        self.sitea.name = 'sitea'
        self.sitea.type = 'primary'
        self.sitea.location = 'testlab'

    def _parse_host_node(self, basename):
        """Parse an XML fixture under XML_FILE_LOCATION; return its root node."""
        xmlfile = os.path.join(XML_FILE_LOCATION, basename)
        tree = etree.parse(xmlfile)
        return tree.getroot()

    def test_empty_host(self):
        """ Test an empty host node """
        node = self._parse_host_node('host_empty.xml')
        self.failUnlessRaises(KeyError, host.create_host_from_node, node, self.defaults, self.sitea)

    def test_named_host(self):
        """ Test a host node with just a name """
        node = self._parse_host_node('host_named.xml')
        self.failUnlessRaises(KeyError, host.create_host_from_node, node, self.defaults, self.sitea)

    def test_named_host_os(self):
        """ Test a host node with a name and operating system """
        node = self._parse_host_node('host_named_os.xml')
        hostobj = host.create_host_from_node(node, self.defaults, self.sitea)
        self.failUnlessEqual(hostobj.name, 'fred')
        self.failUnlessEqual(hostobj.operatingsystem, 'Linux')

    def test_named_host_os_platform(self):
        """ Test a host node with a name, operating system and platform """
        node = self._parse_host_node('host_named_os_platform.xml')
        hostobj = host.create_host_from_node(node, self.defaults, self.sitea)
        self.failUnlessEqual(hostobj.name, 'fred')
        self.failUnlessEqual(hostobj.operatingsystem, 'Linux')
        self.failUnlessEqual(hostobj.platform, 'intel')
def __init__(self, hashfile=LOCATION, storeKey="", configFile=CONFIGFILE): self.userDict = { } # Mapping of username -> salt:hash self.hashfile = hashfile # Encrypted file, contains lines of user:salt:hash self.h = SHA256.new() self.crypto = crypto.TBCrypt() # AES encryption/IV functions self.storeKey = storeKey # Key material to open encrypted hash store if configFile: try: fp = open(configFile) except IOError as e: error = 'IOError: can''t access file ''%s'' (%s).' % (configFile, os.strerror(e.errno)) raise HashStoreException(error) config = RawConfigParser() config.read(configFile) self.storeKey = config.get('hashstore', 'key') self.hashfile = config.get('hashstore', 'location') else: if os.path.exists(self.hashfile): self.updateUserDict() else: self.__saveHashstore() if self.storeKey == '': print "WARNING: hashstoremanager.py: no hashstore key defined!" # print "h.update" self.h.update(self.storeKey) # print "self.storeKey = self.h.digest()" self.storeKey = self.h.digest()
def get_firefox_home_file(needed_file):
    """Resolve *needed_file* inside the default Firefox profile directory.

    Returns None when no Firefox directory exists, "" when no profile path
    can be determined, otherwise the joined absolute path.
    """
    firefox_dir = None
    for candidate in ("~/.mozilla/firefox-3.5/", "~/.mozilla/firefox/"):
        candidate = os.path.expanduser(candidate)
        if os.path.exists(candidate):
            firefox_dir = candidate
            break
    if firefox_dir is None:
        return None
    config = RawConfigParser({"Default" : 0})
    config.read(os.path.join(firefox_dir, "profiles.ini"))
    path = None
    for section in config.sections():
        # The section explicitly marked Default=1 wins; otherwise the first
        # section with a Path is remembered as a fallback.
        if config.has_option(section, "Default") and config.get(section, "Default") == "1":
            path = config.get(section, "Path")
            break
        if path is None and config.has_option(section, "Path"):
            path = config.get(section, "Path")
    if path is None:
        return ""
    if path.startswith("/"):
        return os.path.join(path, needed_file)
    return os.path.join(firefox_dir, path, needed_file)
def setUp(self): """ Set up the arguments that every unit test for put/get will use. """ parser = RawConfigParser() config_file = join(BASE_DIR, '.private/.config.txt') self.name = 'orkatest' parser.read(config_file) try: self.token = parser.get('cloud \"~okeanos\"', 'token') self.auth_url = parser.get('cloud \"~okeanos\"', 'url') self.base_url = parser.get('deploy', 'url') self.project_name = parser.get('project', 'name') self.master_IP = parser.get('cluster', 'master_ip') clusters = get_user_clusters(self.token) self.active_cluster = None for cluster in clusters: if cluster['master_IP'] == self.master_IP: if cluster['hadoop_status'] == const_hadoop_status_started: self.active_cluster = cluster break else: logging.error(' You can take file actions on active clusters with started hadoop only.') exit(error_fatal) self.opts = {'source': '', 'destination': '', 'token': self.token, 'cluster_id': self.active_cluster['id'], 'auth_url': self.auth_url, 'user': '', 'password': ''} except NoSectionError: self.token = 'INVALID_TOKEN' self.auth_url = "INVALID_AUTH_URL" self.base_url = "INVALID_APP_URL" self.project_name = "INVALID_PROJECT_NAME" print 'Current authentication details are kept off source control. ' \ '\nUpdate your .config.txt file in <projectroot>/.private/'
def handle(self, *args, **options): handle = urllib.urlopen(URL) config = RawConfigParser(dict_type=MultiOrderedDict) config.readfp(handle) master = config.get('demo', 'master-release')[0] modified = False processed = set() for version in config.get('demo', 'branches[]'): demo, created = Demo.objects.get_or_create( name=version, defaults={'master_version': master} ) modified |= created if not created and demo.master_version != master: demo.master_version = master demo.save() modified = True processed.add(demo.id) Demo.objects.exclude(id__in=processed).delete() if modified: purge_cdn(reverse('try'))
def _parse_legacy_config_file(self): """ Parse a legacy configuration file. """ conf = RawConfigParser() conf.read(LEGACY_CONFIG_FILE) styles = self.styles.copy() if conf.has_option('params', 'dm_template'): styles['dm_template'] = conf.get('params', 'dm_template') if conf.has_option('params', 'header_template'): styles['header_template'] = conf.get('params', 'header_template') self.styles.update(styles) if conf.has_option('params', 'logging_level'): self.logging_level = conf.getint('params', 'logging_level') for binding in self.key_bindings: if conf.has_option('keys', binding): custom_key = conf.get('keys', binding) self._set_key_binding(binding, custom_key) palette_labels = [color[0] for color in PALETTE] for label in palette_labels: if conf.has_option('colors', label): custom_fg = conf.get('colors', label) self._set_color(label, custom_fg)
def __read_config(self): """ Obtiene la configuración de la persona de su archivo .person y devuelve True si es válida """ parser = RawConfigParser() with codecs.open("config/" + self.person + '.person', 'r', encoding='utf-8') as f: parser.readfp(f) if parser.has_section("web_bug"): if parser.has_option("web_bug", 'search_terms'): self.__search_terms = parser.get("web_bug", 'search_terms') if self.__search_terms == '': return False else: return False if parser.has_option("web_bug", 'weight'): self.__weight = parser.getint("web_bug", 'weight') if parser.has_option("web_bug", 'weight_no_search_terms'): self.__weight_no_search_terms = parser.getint("web_bug", 'weight_no_search_terms') if parser.has_option("web_bug", 'weight_visit'): self.__weight_visit = parser.getint("web_bug", 'weight_visit') if parser.has_option("web_bug", 'webbug_log'): self.__webbug_log =\ [e.strip() for e in parser.get("web_bug", 'webbug_log').split(',')] if self.__webbug_log == ['']: return False else: return False else: return False return True
def collapse_arakoon():
    """
    Collapse Arakoon's Tlogs
    :return: None
    """
    logger.info('Starting arakoon collapse')
    # Map cluster name -> storagerouter for every arakoon-backed service.
    arakoon_clusters = {}
    for service in ServiceList.get_services():
        if service.type.name in ('Arakoon', 'NamespaceManager', 'AlbaManager'):
            arakoon_clusters[service.name.replace('arakoon-', '')] = service.storagerouter
    for cluster, storagerouter in arakoon_clusters.iteritems():
        logger.info(' Collapsing cluster {0}'.format(cluster))
        contents = EtcdConfiguration.get(ArakoonClusterConfig.ETCD_CONFIG_KEY.format(cluster), raw=True)
        parser = RawConfigParser()
        parser.readfp(StringIO(contents))
        nodes = {}
        for node in parser.get('global', 'cluster').split(','):
            node = node.strip()
            nodes[node] = ([parser.get(node, 'ip')], parser.get(node, 'client_port'))
        config = ArakoonClientConfig(str(cluster), nodes)
        for node in nodes.keys():
            logger.info(' Collapsing node: {0}'.format(node))
            client = ArakoonAdminClient(node, config)
            try:
                client.collapse_tlogs(2)
            except Exception:
                # BUG FIX: the bare 'except:' also swallowed SystemExit and
                # KeyboardInterrupt; catch Exception instead.
                logger.exception('Error during collapsing cluster {0} node {1}'.format(cluster, node))
    logger.info('Arakoon collapse finished')
def setUp(self): self.driver = webdriver.Firefox() self.driver.implicitly_wait(30) self.verificationErrors = [] self.accept_next_alert = True parser = RawConfigParser() config_file = join(BASE_DIR, ".private/.config.txt") self.name = "testcluster" parser.read(config_file) try: self.token = parser.get('cloud "~okeanos"', "token") self.auth_url = parser.get('cloud "~okeanos"', "url") self.base_url = parser.get("deploy", "url") self.project_name = parser.get("project", "name") auth = check_credentials(self.token) try: list_of_projects = auth.get_projects(state="active") except Exception: self.assertTrue(False, "Could not get list of projects") for project in list_of_projects: if project["name"] == self.project_name: self.project_id = project["id"] except NoSectionError: self.token = "INVALID_TOKEN" self.auth_url = "INVALID_AUTH_URL" self.base_url = "INVALID_APP_URL" self.project_name = "INVALID_PROJECT_NAME" print "Current authentication details are kept off source control. " "\nUpdate your .config.txt file in <projectroot>/.private/"
def __read_config(self): """ Obtiene la configuración de la persona de su archivo .person y devuelve True si es válida """ pparser = RawConfigParser() with codecs.open("config/" + self.person + '.person', 'r', encoding='utf-8') as cf: pparser.readfp(cf) if pparser.has_section("general"): if pparser.has_option('general', 'name'): self.name = pparser.get('general', 'name') if self.name == '': return False else: return False if pparser.has_option('general', 'notify'): self.notify = pparser.getboolean('general', 'notify') if pparser.has_option('general', 'alarm_threshold'): self.alarm_threshold = pparser.getint('general', 'alarm_threshold') if pparser.has_option('general', 'email'): self.email = pparser.get('general', 'email') if self.email == '' and self.notify: return False else: return False return True
def loadMeta(self):
    "Loads the 'meta' - variables that change with the server (worlds, admins, etc.)"
    config = ConfigParser()
    config.read("config/data/ranks.meta")
    specs = ConfigParser()
    specs.read("config/data/spectators.meta")
    lastseen = ConfigParser()
    lastseen.read("config/data/lastseen.meta")
    bans = ConfigParser()
    bans.read("config/data/bans.meta")
    worlds = ConfigParser()
    worlds.read("config/data/worlds.meta")
    # Read in the admins
    if config.has_section("admins"):
        for name in config.options("admins"):
            self.admins.add(name)
    # Read in the mods
    if config.has_section("mods"):
        for name in config.options("mods"):
            self.mods.add(name)
    if config.has_section("globalbuilders"):
        for name in config.options("globalbuilders"):
            self.globalbuilders.add(name)
    if config.has_section("members"):
        for name in config.options("members"):
            self.members.add(name)
    # Read in the directors
    if config.has_section("directors"):
        for name in config.options("directors"):
            self.directors.add(name)
    if config.has_section("silenced"):
        for name in config.options("silenced"):
            self.silenced.add(name)
    # Read in the spectators (experimental)
    if specs.has_section("spectators"):
        for name in specs.options("spectators"):
            self.spectators.add(name)
    # BUG FIX: 'bans' was re-created and bans.meta re-read a second time
    # here for no reason; the parser loaded above is reused instead.
    # Read in the bans
    if bans.has_section("banned"):
        for name in bans.options("banned"):
            self.banned[name] = bans.get("banned", name)
    # Read in the ipbans
    if bans.has_section("ipbanned"):
        for ip in bans.options("ipbanned"):
            self.ipbanned[ip] = bans.get("ipbanned", ip)
    # Read in the lastseen
    if lastseen.has_section("lastseen"):
        for username in lastseen.options("lastseen"):
            self.lastseen[username] = lastseen.getfloat("lastseen", username)
    # Read in the worlds
    if worlds.has_section("worlds"):
        for name in worlds.options("worlds"):
            # BUG FIX: 'name is self.default_name' compared object identity;
            # strings parsed from a file are generally not interned, so the
            # default world was never detected. Compare by value.
            if name == self.default_name:
                self.default_loaded = True
    else:
        # No worlds recorded yet: make sure the default world exists.
        self.worlds[self.default_name] = None
    if not self.default_loaded:
        self.worlds[self.default_name] = None
def parse_locators(file):
    """Parses locators configuration file and returns dictionary of locators.

    Arguments:
    file = locators file object (opened with open() method)

    Return:
    Dictionary of parsed locators.  With a single section the section name
    is omitted and the mapping is flat; with several sections the result
    is nested one level per section."""
    parser = RawConfigParser()
    # readfp() was removed in Python 3.12; prefer read_file() when present.
    reader = getattr(parser, 'read_file', None)
    if reader is None:
        reader = parser.readfp
    reader(file)
    sections = parser.sections()
    locators = {}
    # BUG FIX: 'len(sections) is 1' compared identity, not equality — it
    # only worked by accident of CPython's small-int caching.
    if len(sections) == 1:
        # don't add section name when only one section exists
        section = sections[0]
        for name in parser.options(section):
            locators[name] = parser.get(section, name)
    else:
        # add section name as a key when more than one section exists
        for section in sections:
            locators[section] = {}
            for name in parser.options(section):
                locators[section][name] = parser.get(section, name)
    return locators
def __read_config(self): """ Obtiene la configuración de la persona de su archivo .person y devuelve True si es válida """ parser = RawConfigParser() with codecs.open("config/" + self.person + '.person', 'r', encoding='utf-8') as f: parser.readfp(f) if parser.has_section("metadata_crawling"): if parser.has_option('metadata_crawling', 'files'): self.__files =\ [e.strip() for e in parser.get('metadata_crawling', 'files').split(',')] if self.__files == ['']: return False else: return False if parser.has_option('metadata_crawling', 'weight'): self.__weight = parser.getint('metadata_crawling', 'weight') if parser.has_option('metadata_crawling', 'access_log'): self.__access_log = parser.get('metadata_crawling', 'access_log') if not self.__access_log: return False else: return False if parser.has_option('metadata_crawling', 'access_log_format'): self.__access_log_format = parser.get('metadata_crawling', 'access_log_format') if not self.__access_log_format: return False else: return False else: return False return True
def getAccount(account_key):
    """Return (account, password) for *account_key* from the .account file.

    The password is cached AES-encrypted; when absent it is asked for
    interactively and stored back into the file.
    """
    from ConfigParser import RawConfigParser, NoOptionError, NoSectionError
    account_file = '.account'
    config = RawConfigParser()
    with open(account_file, 'r') as fp:
        config.readfp(fp)
    account = config.get('account', account_key)
    password_section = 'password'
    password = None
    try:
        password = config.get(password_section, account_key)
    except NoSectionError:
        # First password ever stored: create the section for the write below.
        config.add_section(password_section)
    except NoOptionError:
        pass
    aes = AESCipher(account)
    if password:
        return account, aes.decrypt(password).encode('UTF-8')
    # No cached password: ask interactively and persist it encrypted.
    from getpass import getpass
    password = getpass(account_key + ' of ' + account + "'s password: ")
    config.set(password_section, account_key, aes.encrypt(password))
    with open(account_file, 'w') as fp:
        config.write(fp)
    return account, password
def upload(source_metadata):
    """
    Uploads a given set of packages
    """
    _ = source_metadata
    settings = RawConfigParser()
    settings.read('{0}/../settings.cfg'.format(ROOT_PATH))
    package_name = settings.get('packaging', 'package_name')
    package_path = SourceCollector.package_path.format(settings.get('packaging', 'working_dir'),
                                                       package_name)
    redhat_folder = '{0}/redhat'.format(package_path)
    destination_folder = '/usr/share/repo/CentOS/7/x86_64/'
    destination_server = '172.20.3.17'
    user = '******'
    packages = os.listdir(redhat_folder)
    # Copy every built rpm over, then refresh the repo metadata once.
    for package in packages:
        package_source_path = os.path.join(redhat_folder, package)
        print('Uploading package {0}'.format(package))
        SourceCollector.run('scp {0} {1}@{2}:{3}'.format(package_source_path, user,
                                                         destination_server, destination_folder),
                            working_directory=redhat_folder)
    if packages:
        SourceCollector.run('ssh {0}@{1} createrepo --update {2}'.format(user, destination_server,
                                                                         destination_folder),
                            working_directory=redhat_folder)
def upload(source_metadata): """ Uploads a given set of packages """ filename = "{0}/../settings.cfg".format(os.path.dirname(os.path.abspath(__file__))) settings = RawConfigParser() settings.read(filename) package_name = settings.get("packaging", "package_name") package_path = SourceCollector.package_path.format(settings.get("packaging", "working_dir"), package_name) version_string = source_metadata[1] new_package = version_string not in SourceCollector.run( command="ssh [email protected] \"grep '{0}_{1}-1_amd64' /data/www/apt/*/Packages\" || true".format( package_name, version_string ), working_directory=package_path, ) print "Uploading {0} package: {1}".format( "new" if new_package else "existing", "{0}_{1}-1_amd64".format(package_name, version_string) ) SourceCollector.run( command="dput -c {0}/debian/dput.cfg ovs-apt {0}/debian/{1}_{2}-1_amd64.changes".format( package_path, package_name, version_string ), working_directory=package_path, ) SourceCollector.run( command='ssh [email protected] "mini-dinstall -b{0}"'.format( "" if new_package else " --no-db" ), working_directory=package_path, )
def parse_config(self, filename):
    """Parse a display-layout config file.

    Each section describes a virtual device with mandatory 'port', 'size'
    and 'offset' options; any remaining options map other device names to
    their offsets, and devices become mutual neighbours in transtbl.

    Raises:
        RuntimeError: when two sections give conflicting offsets for the
            same device.
    """
    # BUG FIX: the module was renamed in Python 3; import either spelling.
    try:
        from ConfigParser import RawConfigParser
    except ImportError:
        from configparser import RawConfigParser
    import io
    config = RawConfigParser()
    # readfp() was removed in Python 3.12; prefer read_file() when present.
    reader = getattr(config, 'read_file', None)
    if reader is None:
        reader = config.readfp
    # BUG FIX: the file object was opened and leaked; close it deterministically.
    with io.open(filename, 'r', encoding='utf_8_sig') as handle:
        reader(handle)
    for s in config.sections():
        port = int(config.get(s, 'port'))
        config.remove_option(s, 'port')
        xsize, ysize = [int(d) for d in config.get(s, 'size').split(",")]
        config.remove_option(s, 'size')
        x_off, y_off = [int(d) for d in config.get(s, 'offset').split(",")]
        config.remove_option(s, 'offset')
        self.offsets[s] = (x_off, y_off)
        for device, offset in config.items(s):
            x_off, y_off = [int(d) for d in offset.split(",")]
            # BUG FIX: dict.has_key() is Python-2-only and inconsistent
            # with the 'in' checks below; use the 'in' operator.
            if device in self.offsets:
                if (x_off, y_off) != self.offsets[device]:
                    raise RuntimeError("conflicting offsets for device %s" % device)
            self.offsets[device] = (x_off, y_off)
            if s in self.transtbl:
                self.transtbl[s].append(device)
            else:
                self.transtbl[s] = [device]
            if device in self.transtbl:
                self.transtbl[device].append(s)
            else:
                self.transtbl[device] = [s]
        self.add_virtual(s, xsize, ysize, port)
def collapse_arakoon():
    """
    Collapse Arakoon's Tlogs
    :return: None
    """
    ScheduledTaskController._logger.info('Starting arakoon collapse')
    # Only internal arakoon-backed services are collapsed.
    arakoon_clusters = []
    for service in ServiceList.get_services():
        if service.is_internal is True and \
                service.type.name in (ServiceType.SERVICE_TYPES.ARAKOON,
                                      ServiceType.SERVICE_TYPES.NS_MGR,
                                      ServiceType.SERVICE_TYPES.ALBA_MGR):
            arakoon_clusters.append(service.name.replace('arakoon-', ''))
    for cluster in arakoon_clusters:
        ScheduledTaskController._logger.info(' Collapsing cluster {0}'.format(cluster))
        contents = EtcdConfiguration.get(ArakoonClusterConfig.ETCD_CONFIG_KEY.format(cluster), raw=True)
        parser = RawConfigParser()
        parser.readfp(StringIO(contents))
        nodes = {}
        for node in parser.get('global', 'cluster').split(','):
            node = node.strip()
            nodes[node] = ([str(parser.get(node, 'ip'))], int(parser.get(node, 'client_port')))
        config = ArakoonClientConfig(str(cluster), nodes)
        for node in nodes.keys():
            ScheduledTaskController._logger.info(' Collapsing node: {0}'.format(node))
            client = ArakoonAdmin(config)
            try:
                client.collapse(str(node), 2)
            except Exception:
                # BUG FIX: the bare 'except:' also swallowed SystemExit and
                # KeyboardInterrupt; catch Exception instead.
                ScheduledTaskController._logger.exception('Error during collapsing cluster {0} node {1}'.format(cluster, node))
    ScheduledTaskController._logger.info('Arakoon collapse finished')
def get_firefox_profiles(self, directory): """ List all profiles """ cp = RawConfigParser() profile_list = [] try: cp.read(os.path.join(directory, 'profiles.ini')) for section in cp.sections(): if section.startswith('Profile') and cp.has_option(section, 'Path'): profile_path = None if cp.has_option(section, 'IsRelative'): if cp.get(section, 'IsRelative') == '1': profile_path = os.path.join(directory, cp.get(section, 'Path').strip()) elif cp.get(section, 'IsRelative') == '0': profile_path = cp.get(section, 'Path').strip() else: # No "IsRelative" in profiles.ini profile_path = os.path.join(directory, cp.get(section, 'Path').strip()) if profile_path: profile_list.append(profile_path) except Exception as e: self.error(u'An error occurred while reading profiles.ini: {}'.format(e)) return profile_list
def __init__(self, filename, mixer): from ConfigParser import RawConfigParser config = RawConfigParser() config.read(filename) self.play_intro = config.getboolean('Main', 'play_intro') if mixer: self.mixer = config.getboolean('Main', 'mixer') else: self.mixer = False self.music = GetDataPath(config.get('Main', 'music')) self.music = os.path.expanduser(self.music) width = config.getint('Defaults', 'width') height = config.getint('Defaults', 'height') self.subsettings = {} sections = config.sections() for defaults in ('Main', 'Defaults'): sections.remove(defaults) for sec in sections: op_dict = {} for op in config.options(sec): op_dict[op] = config.get(sec, op) if op in ('width', 'height'): op_dict[op] = eval(op_dict[op]) for op in ('width', 'height'): if op not in op_dict or op_dict[op] == 0: op_dict[op] = locals()[op] self.subsettings[sec] = op_dict
def __init__(self, tweet_config, db_path, connect_time = 10):
    """Initialise the Twython service from a twitter config file.

    Args:
        tweet_config: path to an ini file with a [TweetAuth] section.
        db_path: path handed to the tweet Database.
        connect_time: seconds to wait between connection attempts.

    Raises:
        TwythonServiceError: when the config file is missing or incomplete.
    """
    logging.debug('Twython Service: Initializing Twython Service')
    if(not os.path.isfile(tweet_config)):
        error_msg = 'Twython Service: Invalid twitter config file: ' + tweet_config
        logging.error(error_msg)
        raise TwythonServiceError(error_msg)
    try:
        config = RawConfigParser()
        config.read(tweet_config)
        self.__twitter = Twython(config.get('TweetAuth','twitter_token'),
                                 config.get('TweetAuth','twitter_secret'),
                                 config.get('TweetAuth','oauth_token'),
                                 config.get('TweetAuth','oauth_token_secret'))
        logging.debug('Twython Service: Loaded twitter configuration')
        # Exponential back-off/back-on schedule for reconnect attempts.
        self.__wait_time = (1,2,4,8,16,32,64,128,64,32,16,8,4,2,1)
        self.__wait_index = -1
        self.__connect_time = connect_time
        self.__database = Database(db_path)
        self.__tweet_ready = threading.Event()
        self.__tweet_ready.set()
        self.__is_alive = True
        self.__process_thread = threading.Thread(target=self.__process_tweets)
        self.__process_thread.start()
    except (NoSectionError, NoOptionError):
        # BUG FIX: 'except NoSectionError, NoOptionError:' only caught
        # NoSectionError (binding it to the name NoOptionError); a tuple
        # catches both exception types.
        error_msg = 'Twython Service: Twitter initialization failed'
        logging.debug(error_msg)
        raise TwythonServiceError(error_msg)
def init_config(config_file, _reactor, group):
    '''initialize configuration

    configuration file should be in the form:
    [transports]
    ipN=host:port
    serN=COMX
    '''
    cp = RawConfigParser()
    cp.read(config_file)
    # Optional [global] settings.
    if cp.has_option('global','debug'):
        global_config.loop_interval = 5.0
        global_config.debug = True
    if cp.has_option('global','smtphost'):
        global_config.smtphost = cp.get('global','smtphost')
    if cp.has_option('global','smtpfrom'):
        global_config.smtpfrom = cp.get('global','smtpfrom')
    if cp.has_option('global','smtpto'):
        global_config.smtpto = [addr.strip() for addr in cp.get('global','smtpto').split(',')]
    # [transports]: 'ip*' entries open TCP connections, 'ser*' serial ports.
    for op in cp.options('transports'):
        value = cp.get('transports', op)
        if op.startswith('ip'):
            host, port = value.split(':')
            _reactor.connectTCP(host, int(port), group.factory())
        elif op.startswith('ser'):
            serialport.SerialPort(group.protocol(), value, _reactor)
def get_client(client_type=None): """ Returns a volatile storage client """ if not hasattr(VolatileFactory, 'store') or VolatileFactory.store is None: if client_type is None: client_type = Configuration.get('ovs.core.storage.volatile') VolatileFactory.store = None if client_type == 'memcache': from ovs.extensions.storage.volatile.memcachestore import MemcacheStore memcache_servers = list() memcache_config = RawConfigParser() memcache_config.read(os.path.join(Configuration.get('ovs.core.cfgdir'), 'memcacheclient.cfg')) nodes = [node.strip() for node in memcache_config.get('main', 'nodes').split(',')] nodes.sort() for node in nodes: location = memcache_config.get(node, 'location') memcache_servers.append(location) VolatileFactory.store = MemcacheStore(memcache_servers) if client_type == 'default': from ovs.extensions.storage.volatile.dummystore import DummyVolatileStore VolatileFactory.store = DummyVolatileStore() if VolatileFactory.store is None: raise RuntimeError('Invalid client_type specified') return VolatileFactory.store
def __init__(self):
    """Load settings from python-filebutler.conf and ensure DB tables exist."""
    paths = [
        'python-filebutler.conf',
        '/etc/python-filebutler.conf',
    ]
    config = RawConfigParser()
    if not config.read(paths):
        sys.exit("Couldn't read configuration file")
    self.secret_key = config.get('settings', 'secret_key')
    self.storage_path = config.get('settings', 'storage_path')
    # Create tables, tolerating the case where they already exist.
    for model, table in ((File, 'file'), (User, 'user')):
        try:
            model.create_table()
        except sqlite3.OperationalError as e:
            # BUG FIX: the old code tested '... in e', which iterates the
            # exception's args (whole-string equality) rather than searching
            # the message text; match against str(e) instead.
            if 'table "%s" already exists' % table not in str(e):
                sys.exit(e)
def setUp(self): super(TestFTPConnector, self).setUp() config = RawConfigParser() config.read(os.path.join(os.path.dirname(__file__), "resources/ftp/ftp_config.ini")) self.host_download = config.get("ftp", "host.download") self.user_name = config.get("ftp", "user.name") self.password = config.get("ftp", "password") self.path = config.get("ftp", "path") ftp = ftp_client(self.host_download, "{0}".format(BASE_DIR), self.user_name, self.password) self.assertFalse(ftp.exists()) if not ftp.exists(): ftp.create(make_dir=True, create_parents=True) self.assertTrue(ftp.exists()) self.assertTrue(ftp.is_directory()) ftp = ftp_client(self.host_download, "{0}/file".format(BASE_DIR), self.user_name, self.password) self.assertFalse(ftp.exists()) if not ftp.exists(): ftp.create(make_dir=False, create_parents=True) self.assertTrue(ftp.exists()) self.assertFalse(ftp.is_directory()) ftp = ftp_client(self.host_download, "{0}/folder/file".format(BASE_DIR), self.user_name, self.password) self.assertFalse(ftp.exists()) if not ftp.exists(): ftp.create(make_dir=False, create_parents=True) self.assertTrue(ftp.exists()) self.assertFalse(ftp.is_directory())
def test_dumped_values_in_tracini(self): parser = RawConfigParser() filename = self.env.config.filename self.assertEqual([filename], parser.read(filename)) self.assertEqual('#cc0,#0c0,#0cc,#00c,#c0c,#c00', parser.get('revisionlog', 'graph_colors')) self.assertEqual('disabled', parser.get('trac', 'secure_cookies'))
DEBUG = config.getboolean('debug','DEBUG') TEMPLATE_DEBUG = DEBUG #DEBUG_PROPAGATE_EXCEPTIONS = DEBUG XMLRPC_DEBUG = False ALLOWED_HOSTS = ['*'] USE_TZ = True SITE_ID = 1 TESTSYTEM = config.getboolean('debug','TESTSYTEM') ADMINS = tuple(config.items('admins')) MANAGERS = ADMINS EMAIL_SUBJECT_PREFIX = 'yats-dev' EMAIL_HOST = config.get('mail', 'EMAIL_HOST') EMAIL_PORT = config.get('mail', 'EMAIL_PORT') SERVER_EMAIL = config.get('mail', 'SERVER_EMAIL') EMAIL_HOST_USER = config.get('mail', 'EMAIL_HOST_USER') EMAIL_HOST_PASSWORD = config.get('mail', 'EMAIL_HOST_PASSWORD') #DATABASE_ROUTERS = ['web.routers.ModelRouter'] DATABASES = { 'default': { 'ENGINE': config.get('database', 'DATABASE_ENGINE'), 'NAME': config.get('database', 'DATABASE_NAME'), 'USER': config.get('database', 'DATABASE_USER'), 'PASSWORD': config.get('database', 'DATABASE_PASSWORD'), 'HOST': config.get('database', 'DATABASE_HOST'), 'PORT': config.get('database', 'DATABASE_PORT'), 'ATOMIC_REQUESTS': config.get('database', 'ATOMIC_REQUESTS')
def mexhelpextract(mexnames):
    """For each mex file name, run a MATLAB helper that dumps per-subfunction
    usage/help/seealso strings to a temp dir, then post each subfunction's
    wiki-formatted documentation to the Psychtoolbox wiki (via `mech`/`post`).

    Python 2 code; relies on module-level globals `_tmpdir`, `_mexscript`,
    `mech`, `baseurl`, `beackern`, `post`.
    """
    #print 'processing mex files: ' + mexnames.__repr__()
    from ConfigParser import RawConfigParser as ConfigParser, Error as error
    for mexname in mexnames:
        # ConfigParser for the three elements per subfunctions written to tmpdir
        # [SubFunction]
        # usage: 'xyz'
        # help: 'xyz'
        # seealso: 'xyz'
        config = ConfigParser({'usage':[], 'help':[], 'seealso':[]})
        # assemble command line for matlab
        matlabcmd = 'addpath(\'%s\');%s(\'%s\',\'%s\'); exit' % \
            (_tmpdir, \
             os.path.splitext(os.path.basename(_mexscript))[0], \
             mexname, \
             _tmpdir)
        cmd = 'matlab -nojvm -nodisplay -r "%s" > /dev/null' % matlabcmd
        # and execute matlab w/ the temporary script we wrote earlier
        try:
            print 'running MATLAB for %s in %s' % (mexname,_tmpdir)
            p = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE,
                                 stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
                                 close_fds=True)
            stderr = p.communicate()[1]
            if stderr: print stderr
        except:
            print 'could not dump help for %s into %s.' % (mexname,_tmpdir)
        # MATLAB wrote one ini-style file per mex; read() returns the list of
        # files successfully parsed, so [] means MATLAB produced no output
        cfgfile = config.read(os.path.join(_tmpdir,mexname))
        if cfgfile == []:
            print "skipping " + mexname + " (no output)"
            continue
        subfunctions = config.sections()
        print 'processing subfunctions: ' + subfunctions.__repr__()
        for subfunction in subfunctions:
            # read in the strings for this subfunction
            usage = config.get(subfunction,'usage')
            help = config.get(subfunction,'help')
            seealso = config.get(subfunction,'seealso')
            headline = '===[['+subfunction+' '+mexname+'(\''+subfunction+'\')]]===\n'
            breadcrumb = "==[[Psychtoolbox]] › [[" \
                + mexname + "]].{mex*,dll} subfunction==\n\n"
            # scrub the text for main text only
            body = beackern(help)
            docstring = '' \
                + '%%(matlab;Usage)' \
                + usage \
                + '%%\n' \
                + body \
                + '\n\n'
            if seealso:
                docstring = docstring + '<<=====See also:=====\n' + seealso + '<<'
            text = '""' + headline \
                + breadcrumb \
                + docstring + '""'
            # retrieve old body text, to update or concatenate with synonymous subfunctions
            #
            # browse the page
            title = re.sub("[^\w]|_","",subfunction)
            try:
                resp = mech.open(baseurl+title+"/edit")
            except HTTPError, e:
                sys.exit("retrieving old text during posting of this mex function failed: %d: %s"
                         % (e.code, e.msg))
            # get text from the edit form
            mech.select_form(nr=1)
            try:
                oldbody = mech["body"]
            except:
                print 'No id="body" form. Figure this out first. cf. page text above.'
                for form in mech.forms():
                    print form
                sys.exit("retrieving old body text failed while processing page: "
                         + baseurl + title +'/edit')
            # parse embedded structuring HTML tags in the wiki text
            soup = BeautifulSoup(oldbody)
            # check if the subfunction is already present, by CSS 'class' and 'id'
            subfct = soup.find('div', {'class' : "subfct", 'id' : mexname})
            if subfct:
                # replace the text of the container DIV
                subfct.contents[0].replaceWith(text)
            else:
                # contruct new DIV to hold the text
                subfctDIV = Tag(soup, "div")
                subfctDIV['class'] = 'subfct'
                subfctDIV['id'] = mexname
                subfctDIV.insert(0,NavigableString(text))
                # insert the new div
                soup.insert(len(soup),subfctDIV)
            # Now scoop the good well-formed divs out of the soup
            divs = soup('div', {'class' : "subfct"})
            # and drop them into fresh yummy cheese soup
            cheesesoup = BeautifulSoup()
            # drop good divs into the soup, one by one
            for div in divs:
                # remove the unneeded style attribute, we finally
                # have this stuff defined in the ptbdocs.css now.
                del(div['style'])
                # escape the HTML tags for wiki parser
                cheesesoup.append(NavigableString('\n""'))
                cheesesoup.append(div)
                cheesesoup.append(NavigableString('""\n'))
            post(subfunction,cheesesoup.renderContents())
# Parse command line options parser = OptionParser() parser.add_option( "-n", "--new", action="store_true", default=False, help="Only convert new users (users that are not in the samba4 directory)") (cmdline_opts, args) = parser.parse_args() # Parse configuration config = RawConfigParser() config.read("od2samba4.conf") od_password = config.get("opendirectory", "password") outfile_new_name = config.get("files", "newusers_ldif") outfile_all_name = config.get("files", "users_ldif") od_username = config.get("opendirectory", "username") od_url = config.get("opendirectory", "url") od_dc = config.get("opendirectory", "dc") samba4_dc = config.get("samba4", "dc") samba4_url = config.get("samba4", "url") samba4_username = config.get("samba4", "username") samba4_password = config.get("samba4", "password") samba4_upn_realm = config.get("samba4", "upn_realm") nis_domain = config.get("samba4", "nis_domain") outfile_name = (outfile_new_name if cmdline_opts.new else outfile_all_name) USERATTRIBUTES = [
# Copyright (c) 2009-2012 Simplistix Ltd # See license.txt for license details. import os from ConfigParser import RawConfigParser from setuptools import setup, find_packages package_name = 'checker' base_dir = os.path.dirname(__file__) # read test requirements from tox.ini config = RawConfigParser() config.read(os.path.join(base_dir, 'tox.ini')) test_requires = [] for item in config.get('testenv', 'deps').split(): test_requires.append(item) # Tox doesn't need itself, but we need it for testing. test_requires.append('tox') setup(name=package_name, version=file(os.path.join(base_dir, package_name, 'version.txt')).read().strip(), author='Chris Withers', author_email='*****@*****.**', license='MIT', description="A tool for checking system configuration.", long_description=open(os.path.join(base_dir, 'docs', 'description.txt')).read(), url='http://www.simplistix.co.uk/software/python/checker', classifiers=[ 'Development Status :: 5 - Production/Stable',
from flask import Flask, request, json, make_response, abort, redirect

application = Flask(__name__)

# fullname can include the sr name and a codename, leave room for those
MAX_FULLNAME_LENGTH = 128


def jsonpify(callback_name, data):
    """Serialize *data* as a JSONP response invoking *callback_name*."""
    data = callback_name + '(' + json.dumps(data) + ')'
    response = make_response(data)
    response.mimetype = 'text/javascript'
    return response


# Module-level configuration: read once at import time from production.ini
config = RawConfigParser()
config.read(['production.ini'])
tracking_secret = config.get('DEFAULT', 'tracking_secret')
adtracker_url = config.get('DEFAULT', 'adtracker_url')


@application.route('/fetch-trackers')
def fetch_trackers():
    # (handler body continues past this chunk)
    ip = request.environ['REMOTE_ADDR']
    jsonp_callback = request.args['callback']
    ids = request.args.getlist('ids[]')

    # cap the batch size to bound response work
    if len(ids) > 32:
        abort(400)

    hashed = {}
    for fullname in ids:
        if len(fullname) > MAX_FULLNAME_LENGTH:
            continue
STATIC_ROOT = os.path.join(BASE_DIR, "static/")

# Per-deployment secrets live in an ini file next to the settings module
config = RawConfigParser()
config.read(os.path.join(BASE_DIR, 'tinflix/settings.ini'))

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '0mlhbte+-7s8m*%!+*vs7+7th50)qy^y=(4$+*-^t_od6enry@'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

ALLOWED_HOSTS = []

SOCIAL_AUTH_FACEBOOK_KEY = config.get('secrets', 'SOCIAL_AUTH_FACEBOOK_KEY')
SOCIAL_AUTH_FACEBOOK_SECRET = config.get('secrets', 'SOCIAL_AUTH_FACEBOOK_SECRET')

# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'social.apps.django_app.default',
    'django.contrib.staticfiles',
    'tinflixer.apps.TinflixerConfig',
    'movie.apps.MovieConfig'
]
"""Some utility functions to access the AgileZen API (v1).""" import os.path import httplib import json from ConfigParser import RawConfigParser # AgileZen-related constants, read API key from cfg. CFG_PATH = os.path.join(os.path.dirname(__file__), '..', 'notify.cfg') CONFIG = RawConfigParser() CONFIG.read(CFG_PATH) API_KEY = CONFIG.get('api', 'key') API_DOMAIN = 'agilezen.com' API_PATH_PREFIX = '/api/v1' API_HEADERS = {"X-Zen-ApiKey": API_KEY, "Content-Type": "application/json"} PROJECTS_URL = 'https://agilezen.com/api/v1/projects' def get_projects(): """Return a list of all projects.""" conn = httplib.HTTPSConnection(API_DOMAIN) conn.request("GET", PROJECTS_URL, headers=API_HEADERS) response = conn.getresponse().read() return _parse_response(response) def get_people(project_id, role=None): """Return a list of members (dictionaries) that have the given role in the project. If role is None, return all members. """ path = API_PATH_PREFIX + '/projects/' + str(project_id) + '?with=roles'
# > import sys from models.local import * from models.ssh import * from models.moveit_utils import * # Configruation variables for the various hosts from ConfigParser import RawConfigParser config = RawConfigParser() config.read('config/secrets.ini') local = Local() remote = SSH( config.get('webfaction-ssh', 'domain'), config.get('webfaction-ssh', 'user'), config.get('webfaction-ssh', 'pass'), ) local_directory = sys.argv[1] remote_directory = sys.argv[2] local.cd(local_directory) remote.cd(remote_directory) # Fetch the remote URL so we can fix things once the db is moved remote_url = 'http://report.busfedcivic.org' #wordpress_command(remote, 'echo get_option("siteurl");' )[0] local_url = wordpress_command(local, 'echo get_option("siteurl");') local_db = get_wordpress_data(local)
# Debug variables DEBUG = config.getboolean('debug', 'DEBUG') TEMPLATE_DEBUG = config.getboolean('debug', 'TEMPLATE_DEBUG') DEBUG_TOOLBAR = config.getboolean('debug', 'DEBUG_TOOLBAR') # Admins and managers if DEBUG: ADMINS = tuple() else: ADMINS = tuple(config.items('admins')) MANAGERS = ADMINS # Allowed hosts (only used in production) try: _allowed_hosts = config.get('security', 'ALLOWED_HOSTS') ALLOWED_HOSTS = [ x for x in _allowed_hosts.replace(' ', '').split(',') if x ] except (NoSectionError, NoOptionError): ALLOWED_HOSTS = [] # Security try: CSRF_COOKIE_SECURE = config.getboolean('security', 'CSRF_COOKIE_SECURE') except (NoSectionError, NoOptionError): CSRF_COOKIE_SECURE = False # Database configuration DATABASES = {} try:
#!/usr/bin/python import pymongo from datetime import datetime, timedelta import os from ConfigParser import RawConfigParser config = RawConfigParser() config_paths = ['jira.cfg', '../jira.cfg'] config.read([os.path.join(os.getcwd(), path) for path in config_paths]) conn = pymongo.MongoClient(config.get('Mongo', 'uri')) metrics = conn.metrics def MakeHTML(): cursor = metrics.catalog.find({'type': 'weekly'}) cursor.sort([('date', pymongo.DESCENDING)]) yield('<title>Support Metrics Index</title>') yield('<h2>Support Metrics Index</h2>') yield('TODO: make this page less of a Spartan eyesore.') yield('<p>Metrics for week ending:') yield('<ul>') for row in cursor: yesterday = row['date'] - timedelta(days=1) yield('<li><a href="%s">%s</a>' % ( row['url'], yesterday.strftime('%Y-%m-%d %A'))) yield('</ul>') def Main():
def load_config(self, environ):
    """Load configuration options

    Options are read from a config file.

    Backwards compatibility:
        - if ConfigFile is not set, opts are loaded from http config
        - if ConfigFile is set, then the http config must not provide Koji options
        - In a future version we will load the default hub config regardless
        - all PythonOptions (except koji.web.ConfigFile) are now deprecated and
          support for them will disappear in a future version of Koji
    """
    # Legacy mod_python deployments pass options via 'modpy.opts'
    modpy_opts = environ.get('modpy.opts', {})
    if 'modpy.opts' in environ:
        cf = modpy_opts.get('koji.web.ConfigFile', None)
        cfdir = modpy_opts.get('koji.web.ConfigDir', None)
        # to aid in the transition from PythonOptions to web.conf, we do
        # not check the config file by default, it must be configured
        if not cf and not cfdir:
            self.logger.warn(
                'Warning: configuring Koji via PythonOptions is deprecated. Use web.conf'
            )
    else:
        # non-modpy: fall back to the standard kojiweb config locations
        cf = environ.get('koji.web.ConfigFile', '/etc/kojiweb/web.conf')
        cfdir = environ.get('koji.web.ConfigDir', '/etc/kojiweb/web.conf.d')
    # A config dir's contents are read first, then the single file (so the
    # file's values win on conflicts, per RawConfigParser.read ordering)
    if cfdir:
        configs = koji.config_directory_contents(cfdir)
    else:
        configs = []
    if cf and os.path.isfile(cf):
        configs.append(cf)
    if configs:
        config = RawConfigParser()
        config.read(configs)
    elif modpy_opts:
        # presumably we are configured by modpy options
        config = None
    else:
        raise koji.GenericError("Configuration missing")

    # self.cfgmap drives the typed extraction of each known option,
    # either from the [web] section or from the modpy options
    opts = {}
    for name, dtype, default in self.cfgmap:
        if config:
            key = ('web', name)
            if config.has_option(*key):
                if dtype == 'integer':
                    opts[name] = config.getint(*key)
                elif dtype == 'boolean':
                    opts[name] = config.getboolean(*key)
                elif dtype == 'list':
                    opts[name] = [
                        x.strip() for x in config.get(*key).split(',')
                    ]
                else:
                    opts[name] = config.get(*key)
            else:
                opts[name] = default
        else:
            if modpy_opts.get(name, None) is not None:
                if dtype == 'integer':
                    opts[name] = int(modpy_opts.get(name))
                elif dtype == 'boolean':
                    opts[name] = modpy_opts.get(name).lower() in ('yes', 'on', 'true', '1')
                else:
                    opts[name] = modpy_opts.get(name)
            else:
                opts[name] = default
    if 'modpy.conf' in environ:
        debug = environ['modpy.conf'].get('PythonDebug', '0').lower()
        opts['PythonDebug'] = (debug in ['yes', 'on', 'true', '1'])
    # wrap the secret so it does not leak into logs/repr
    opts['Secret'] = koji.util.HiddenValue(opts['Secret'])
    self.options = opts
    return opts
def __init__(self, config_filename):
    """Parse the libalert config file into user and library dictionaries."""
    self.config_filename = config_filename
    self.users = {}
    self.libraries = {}

    # Defaults applied when a user section omits these options.
    defaults = {
        'admin-name': 'Admin',  # not really required so set default
        'returns-days': '1234567',  # can return books every day of week
    }
    config = RawConfigParser(defaults)
    config.read(config_filename)

    for raw_name in config.get('libalert', 'users').split(','):
        name = raw_name.strip()
        # Each user has a section of their own
        user = {
            'email': config.get(name, 'email'),
            'number': config.get(name, 'number'),
            'pin': config.get(name, 'pin'),
            'library': config.get(name, 'library'),
            'days-notice': config.getint(name, 'days-notice'),
            'returns-days': parse_weekday_set(config.get(name, 'returns-days')),
        }
        self.users[name] = user

        # Load the user's library details the first time it is referenced
        library = user['library']
        if library not in self.libraries:
            self.libraries[library] = {
                'type': config.get(library, 'server-type'),
                'url': config.get(library, 'server-url'),
            }

    # Global mail/admin settings
    for attr, option in (('admin_name', 'admin-name'),
                         ('admin_email', 'admin-email'),
                         ('smtp_server', 'smtp-server'),
                         ('smtp_email', 'smtp-email'),
                         ('smtp_password', 'smtp-password')):
        setattr(self, attr, config.get('libalert', option))
    self.debug_level = config.getint('libalert', 'debug-level')
def main(args):
    """Apply stopword/frequency filters to a corpus, save the filtered corpus
    under a derived name, and update the config file to point at it.

    Python 2 code (print statements); relies on module helpers such as
    Corpus, stop_language, get_high_filter/get_low_filter, itemfreq, langs.
    """
    config = ConfigParser({"htrc": False})
    config.read(args.config_file)
    if args.lang is None:
        args.lang = []
    args.corpus_path = config.get("main", "corpus_file")
    c = Corpus.load(args.corpus_path)

    # check for htrc metadata
    if args.htrc or config.get("main", "htrc"):
        htrc_langs = get_htrc_langs(args)
        if htrc_langs:
            args.lang.extend(htrc_langs)

    # Apply stop words
    for lang in args.lang:
        print "Applying", langs[lang], "stopwords"
        c = stop_language(c, langs[lang])

    # Apply custom stopwords file
    if args.stopword_file:
        print "Applying custom stopword file"
        with open(args.stopword_file, encoding='utf8') as swf:
            c = c.apply_stoplist([unidecode(word.strip()) for word in swf])

    # High-frequency filter: either interactive (get_high_filter) or from args
    if not args.high_filter:
        high_filter, candidates = get_high_filter(args, c)
    else:
        high_filter = args.high_filter
        candidates = get_candidate_words(c, args.high_filter)
    if high_filter > 0:
        print "Applying frequency filter > ", high_filter
        c = c.apply_stoplist(candidates)

    # Low-frequency filter, same shape as above (note the negated threshold)
    if not args.low_filter:
        low_filter, candidates = get_low_filter(args, c)
    else:
        low_filter = args.low_filter
        candidates = get_candidate_words(c, -1 * args.low_filter)
    if low_filter > 0:
        print "Applying frequency filter > ", low_filter
        c = c.apply_stoplist(candidates)

    def name_corpus(dirname, languages, lowfreq=None, highfreq=None):
        # Build a descriptive filename encoding the filters applied
        items = itemfreq(c.corpus)
        counts = items[:, 1]
        corpus_name = [dirname]
        if args.lang:
            corpus_name.append('nltk')
            corpus_name.append(''.join(args.lang))
        if lowfreq > 0:
            corpus_name.append('freq%s' % lowfreq)
        else:
            corpus_name.append('freq%s' % min(counts))
        if highfreq > 0:
            corpus_name.append('N%s' % highfreq)
        else:
            # NOTE(review): this branch uses the 'freq' prefix while the
            # highfreq>0 branch uses 'N' -- possibly intentional, but it
            # looks inconsistent; confirm against the naming convention.
            corpus_name.append('freq%s' % max(counts))
        corpus_name = '-'.join(corpus_name)
        corpus_name += '.npz'
        return corpus_name

    dirname = os.path.basename(args.corpus_path).split('-nltk-')[0].replace(
        '.npz', '')
    corpus_name = name_corpus(dirname, ['en'], low_filter, high_filter)
    model_path = os.path.dirname(args.corpus_path)
    args.corpus_path = os.path.join(model_path, corpus_name)
    c.save(args.corpus_path)

    # Point the config at the newly saved corpus
    config.set("main", "corpus_file", args.corpus_path)
    with open(args.config_file, 'wb') as configfh:
        config.write(configfh)
from models.ssh import *
from models.local import *
from models.webfaction import *

# Configruation variables for the various hosts
from ConfigParser import RawConfigParser
config = RawConfigParser()
config.read('../config/secrets.ini')

# Uses sites.txt to determin which sites to move
# (Python-2-only `file()` builtin)
sites = file('../config/sites.txt').read().split("\n")

# Importing the sites
# Bluehost SSH connection
bluehost = SSH(
    config.get('bluehost-ssh', 'domain'),
    config.get('bluehost-ssh', 'user'),
    config.get('bluehost-ssh', 'pass'),
)

# Local thing
local = Local()

# Back up every listed site on bluehost, then pull the backup locally
for raw_site in sites:
    site = Site(raw_site)
    site.backup_bluehost(bluehost)
    site.local_backup(local, config.get('bluehost-ssh', 'user') + '@'
                      + config.get('bluehost-ssh', 'domain'))
# DAMAGES ARISING IN ANY WAY OUT OF THE USE OF THIS PACKAGE. import os import time import fcntl import logging from collections import deque from willie.tools import Nick from willie.module import rule, priority, event from ConfigParser import RawConfigParser conf = RawConfigParser() conf.read('/home/kunwon1/faucet/faucet.conf') logging.basicConfig(format='%(asctime)s %(message)s', level=logging.DEBUG, \ filename=conf.get('main', 'basedir') + \ conf.get('main', 'logfile') ) @rule('(.*)') @priority('low') def chanmsg(bot, trigger): if trigger.sender.startswith('#'): writeChannelMessage(trigger) @rule('(.*)') @priority('low') @event('JOIN') def joinmsg(bot, trigger): writeEventMessage(trigger)
def get(self, section, option):
    """Return the option's value from *section*, coerced to unicode text."""
    raw_value = RawConfigParser.get(self, section, option)
    return unicodify(raw_value)
#!/usr/local/python26/bin/python
# CGI-style redirector: if a running Headphones instance is found (pid file
# plus a live /proc/<pid> entry), emit an HTTP redirect to its configured port.

dir = '/usr/local/var/headphones/'

try:
    # ValueError comes from int() on a malformed pid file.  BUG FIX: the
    # original used `except IOError, ValueError:` which (in Python 2) only
    # catches IOError and re-binds it to the name ValueError -- a malformed
    # pid file crashed the script.  Catch both as a tuple instead.
    pid = int(open(dir + 'headphones.pid', 'rt').readline())
except (IOError, ValueError):
    pass
else:
    from os import path, environ
    # only redirect if the process is actually alive
    if path.isdir('/proc/%d' % pid):
        from ConfigParser import RawConfigParser
        cfg = RawConfigParser()
        # pre-seed the default port in case config.ini does not set it
        cfg.add_section('General')
        cfg.set('General', 'http_port', '8181')
        cfg.read(dir + 'config.ini')
        host = environ['HTTP_HOST'].split(':')[0]
        response = 'Location: http://%s:%s/\n' % (
            host, cfg.get('General', 'http_port'))
        print(response)
class MyConfig(MyLog):
    """Read/write access to the somfy-mqtt-bridge ini config file.

    Keeps a cached RawConfigParser plus attribute copies of the [General]
    and [MQTT] options, dictionaries of shutters and schedules, and can
    rewrite individual values back into the file under a lock.
    """

    #---------------------MyConfig::__init__------------------------------------
    def __init__(self, filename=None, section=None, log=None):
        # NOTE(review): `super(MyLog, self)` skips MyLog.__init__ in the MRO;
        # `super(MyConfig, self)` is the usual form -- confirm intent.
        super(MyLog, self).__init__()
        self.log = log
        self.FileName = filename
        self.Section = section
        self.CriticalLock = threading.Lock(
        )  # Critical Lock (writing conf file)
        self.InitComplete = False
        # Defaults; overwritten by LoadConfig() from the [General]/[MQTT] sections
        self.LogLocation = "/var/log/"
        self.Latitude = 51.4769
        self.Longitude = 0
        self.SendRepeat = 1
        self.UseHttps = False
        self.HTTPPort = 80
        self.HTTPSPort = 443
        self.RTS_Address = "0x279620"
        self.MQTT_ClientID = "somfy-mqtt-bridge"
        self.Shutters = {}
        self.ShuttersByName = {}
        self.Schedule = {}
        self.Password = ""
        try:
            self.config = RawConfigParser()
            self.config.read(self.FileName)
            # default to the first section when none was requested
            if self.Section == None:
                SectionList = self.GetSections()
                if len(SectionList):
                    self.Section = SectionList[0]
        except Exception as e1:
            self.LogErrorLine("Error in MyConfig:init: " + str(e1))
            return
        self.InitComplete = True

    # -------------------- MyConfig::LoadConfig-----------------------------------
    def LoadConfig(self):
        """Populate attributes from [General]/[MQTT] and fill the Shutters
        and Schedule dictionaries. Returns True on success, False on error."""
        # map of option name -> type used by ReadValue for coercion
        parameters = {
            'LogLocation': str,
            'Latitude': float,
            'Longitude': float,
            'SendRepeat': int,
            'UseHttps': bool,
            'HTTPPort': int,
            'HTTPSPort': int,
            'TXGPIO': int,
            'RTS_Address': str,
            "Password": str
        }
        self.SetSection("General")
        for key, type in parameters.items():
            try:
                if self.HasOption(key):
                    setattr(self, key, self.ReadValue(key, return_type=type))
            except Exception as e1:
                self.LogErrorLine(
                    "Missing config file or config file entries in Section General for key "
                    + key + ": " + str(e1))
                return False
        parameters = {
            'MQTT_Server': str,
            'MQTT_Port': int,
            'MQTT_User': str,
            'MQTT_Password': str,
            'MQTT_ClientID': str,
            'EnableDiscovery': bool
        }
        self.SetSection("MQTT")
        for key, type in parameters.items():
            try:
                if self.HasOption(key):
                    setattr(self, key, self.ReadValue(key, return_type=type))
            except Exception as e1:
                self.LogErrorLine(
                    "Missing config file or config file entries in Section General for key "
                    + key + ": " + str(e1))
                return False
        # [Shutters] entries look like "name,enabled,duration"; rolling codes
        # and intermediate positions live in their own sections keyed by id
        self.SetSection("Shutters")
        shutters = self.GetList()
        for key, value in shutters:
            try:
                param1 = value.split(",")
                if param1[1].strip().lower() == 'true':
                    # default/clamp the duration field to 10 when absent or
                    # outside (0, 100)
                    if (len(param1) < 3):
                        param1.append("10")
                    elif (param1[2].strip() == "") or (int(
                            param1[2]) <= 0) or (int(param1[2]) >= 100):
                        param1[2] = "10"
                    param2 = int(
                        self.ReadValue(key,
                                       section="ShutterRollingCodes",
                                       return_type=int))
                    param3 = self.ReadValue(
                        key,
                        section="ShutterIntermediatePositions",
                        return_type=int)
                    if (param3 != None) and ((param3 < 0) or (param3 > 100)):
                        param3 = None
                    self.Shutters[key] = {
                        'name': param1[0],
                        'code': param2,
                        'duration': int(param1[2]),
                        'intermediatePosition': param3
                    }
                    self.ShuttersByName[param1[0]] = key
            except Exception as e1:
                self.LogErrorLine(
                    "Missing config file or config file entries in Section Shutters for key "
                    + key + ": " + str(e1))
                return False
        # [Scheduler] entries are comma-separated records; only rows whose
        # first field is 'active' or 'paused' are kept
        self.SetSection("Scheduler")
        schedules = self.GetList()
        for key, value in schedules:
            try:
                param = value.split(",")
                if param[0].strip().lower() in ('active', 'paused'):
                    self.Schedule[key] = {
                        'active': param[0],
                        'repeatType': param[1],
                        'repeatValue': param[2],
                        'timeType': param[3],
                        'timeValue': param[4],
                        'shutterAction': param[5],
                        'shutterIds': param[6]
                    }
            except Exception as e1:
                self.LogErrorLine(
                    "Missing config file or config file entries in Section Scheduler for key "
                    + key + ": " + str(e1))
                return False
        return True

    #---------------------MyConfig::setLocation---------------------------------
    def setLocation(self, lat, lng):
        # persist then mirror in the attributes
        self.WriteValue("Latitude", lat, section="General")
        self.WriteValue("Longitude", lng, section="General")
        self.Latitude = lat
        self.Longitude = lng

    #---------------------MyConfig::setCode---------------------------------
    def setCode(self, shutterId, code):
        # persist the new rolling code then update the cached entry
        self.WriteValue(shutterId, str(code), section="ShutterRollingCodes")
        self.Shutters[shutterId]['code'] = code

    #---------------------MyConfig::HasOption-----------------------------------
    def HasOption(self, Entry):
        # checks against the currently selected section
        return self.config.has_option(self.Section, Entry)

    #---------------------MyConfig::GetList-------------------------------------
    def GetList(self):
        # (key, value) pairs of the currently selected section
        return self.config.items(self.Section)

    #---------------------MyConfig::GetSections---------------------------------
    def GetSections(self):
        return self.config.sections()

    #---------------------MyConfig::SetSection----------------------------------
    def SetSection(self, section):
        """Select the section subsequent reads/writes operate on."""
        # if not (isinstance(section, str) or isinstance(section, unicode)) or not len(section):
        if not len(section):
            self.LogError("Error in MyConfig:ReadValue: invalid section: " +
                          str(section))
            return False
        self.Section = section
        return True

    #---------------------MyConfig::ReadValue-----------------------------------
    def ReadValue(self,
                  Entry,
                  return_type=str,
                  default=None,
                  section=None,
                  NoLog=False):
        """Read Entry from the current (or given) section, coerced to
        return_type; return default on any failure. The literal string
        'None' reads back as None for int options."""
        try:
            if section != None:
                self.SetSection(section)
            if self.config.has_option(self.Section, Entry):
                if return_type == str:
                    return self.config.get(self.Section, Entry)
                elif return_type == bool:
                    return self.config.getboolean(self.Section, Entry)
                elif return_type == float:
                    return self.config.getfloat(self.Section, Entry)
                elif return_type == int:
                    if self.config.get(self.Section, Entry) == 'None':
                        return None
                    else:
                        return self.config.getint(self.Section, Entry)
                else:
                    self.LogErrorLine(
                        "Error in MyConfig:ReadValue: invalid type:" +
                        str(return_type))
                    return default
            else:
                return default
        except Exception as e1:
            if not NoLog:
                self.LogErrorLine("Error in MyConfig:ReadValue: " + Entry +
                                  ": " + str(e1))
            return default

    #---------------------MyConfig::WriteSection--------------------------------
    def WriteSection(self, SectionName):
        """Append a new empty [SectionName] to the file (no-op if present)."""
        SectionList = self.GetSections()
        if SectionName in SectionList:
            self.LogError("Error in WriteSection: Section already exist.")
            return True
        try:
            with self.CriticalLock:
                with open(self.FileName, "a") as ConfigFile:
                    ConfigFile.write("[" + SectionName + "]")
                    ConfigFile.flush()
                    ConfigFile.close()
                # update the read data that is cached
                self.config.read(self.FileName)
                return True
        except Exception as e1:
            self.LogErrorLine("Error in WriteSection: " + str(e1))
            return False

    #---------------------MyConfig::WriteValue----------------------------------
    def WriteValue(self, Entry, Value, remove=False, section=None):
        """Rewrite the config file in place, updating / inserting / removing
        Entry within the current (or given) section."""
        if section != None:
            self.SetSection(section)
        SectionFound = False
        try:
            with self.CriticalLock:
                Found = False
                ConfigFile = open(self.FileName, 'r')
                FileList = ConfigFile.read().splitlines()
                ConfigFile.close()
                # Locate the section's line span and the entry's line (if any);
                # currentLastDataLine tracks the last non-blank, non-comment
                # line so inserts land at the end of the section's data
                mySectionStart = -1
                mySectionEnd = -1
                myLine = -1
                currentLastDataLine = -1
                for i, line in enumerate(FileList):
                    if self.LineIsSection(line) and self.Section.lower(
                    ) == self.GetSectionName(line).lower():
                        mySectionStart = i
                    elif mySectionStart >= 0 and mySectionEnd == -1 and len(
                            line.strip().split('=')) >= 2 and (
                                line.strip().split('='))[0].strip() == Entry:
                        myLine = i
                    elif mySectionStart >= 0 and mySectionEnd == -1 and self.LineIsSection(
                            line):
                        mySectionEnd = currentLastDataLine
                    if not line.isspace() and not len(
                            line.strip()) == 0 and not line.strip()[0] == "#":
                        currentLastDataLine = i
                if mySectionStart >= 0 and mySectionEnd == -1:
                    mySectionEnd = currentLastDataLine
                self.LogDebug("CONFIG FILE WRITE ->> mySectionStart = " +
                              str(mySectionStart) + ", mySectionEnd = " +
                              str(mySectionEnd) + ", myLine = " + str(myLine))
                if mySectionStart == -1:
                    raise Exception("NOT ABLE TO FIND SECTION:" + self.Section)
                ConfigFile = open(self.FileName, 'w')
                for i, line in enumerate(FileList):
                    if myLine >= 0 and myLine == i and not remove:
                        # I found my line, now write new value
                        ConfigFile.write(Entry + " = " + Value + "\n")
                    elif myLine == -1 and mySectionEnd == i:
                        # Here we have to insert the new record...
                        ConfigFile.write(line + "\n")
                        ConfigFile.write(Entry + " = " + Value + "\n")
                    else:
                        # Nothing special, just copy the previous line....
                        ConfigFile.write(line + "\n")
                ConfigFile.flush()
                ConfigFile.close()
                # update the read data that is cached
                self.config.read(self.FileName)
            return True
        except Exception as e1:
            self.LogError("Error in WriteValue: " + str(e1))
            return False

    #---------------------MyConfig::GetSectionName------------------------------
    def GetSectionName(self, Line):
        """Return the name inside a '[...]' section header line, else ''."""
        Line = Line.strip()
        if Line.startswith("[") and Line.endswith("]") and len(Line) >= 3:
            Line = Line.replace("[", "")
            Line = Line.replace("]", "")
            return Line
        return ""

    #---------------------MyConfig::LineIsSection-------------------------------
    def LineIsSection(self, Line):
        """True if the (stripped) line is a '[...]' section header."""
        Line = Line.strip()
        if Line.startswith("[") and Line.endswith("]") and len(Line) >= 3:
            return True
        return False
def read_key_from_config(config_file='domLink.cfg'):
    """Return the whoxy API key from the [API_KEYS] section of *config_file*.

    The filename defaults to 'domLink.cfg' in the current working directory,
    preserving the original behaviour while allowing callers to point at an
    alternate configuration file.

    Raises configparser NoSectionError/NoOptionError when the section or
    option is missing (unchanged from the original).
    """
    config = RawConfigParser()
    config.read(config_file)
    return config.get('API_KEYS', 'whoxy')
handler.setFormatter(formatter) # Log - Add the handlers to the logger logger.addHandler(handler) ########################################## # Start worker logger.info('================Start working================') # Get config file list_host = [] list_tag = [] config = RawConfigParser() config.read('./config/Config.ini') num_host = config.getint('Initialization', 'number_of_host') for i in range(num_host): hostname = 'Host' + (str)(i + 1) host = config.get(hostname, 'domain') list_host.append(host) taglist = config.get(hostname, 'tag_list') list_tag.append(taglist) # Create connection, channel to RabbitMQ rabbit = pika.BlockingConnection(pika.ConnectionParameters('localhost')) logger.info('Connect to RabbitMQ-server') channel_parse = rabbit.channel() channel_insert = rabbit.channel() logger.info('Create 2 channel : channel_parse, channel_insert') # Create message queue channel_insert.queue_declare(queue='QUEUE_INSERT', durable=True) logger.info('channel_insert create queue : QUEUE_INSERT (durable = True)') channel_parse.queue_declare(queue='QUEUE_PARSE', durable=True) logger.info('channel_parse create queue : QUEUE_PARSER (durable = True)') # Receive message from QUEUE_PARSE
class Config(object):
    """Read and persist mr.developer's state file (.mr.developer.cfg).

    Exposes which packages are checked out for development (``develop``),
    the buildout arguments of the last run (``buildout_args``) and any
    URL rewrites (``rewrites``).
    """

    def __init__(self, buildout_dir):
        self.cfg_path = os.path.join(buildout_dir, '.mr.developer.cfg')
        self._config = RawConfigParser()
        # keep option names case-sensitive (they are package names)
        self._config.optionxform = lambda s: s
        self._config.read(self.cfg_path)
        self.develop = {}
        self.buildout_args = []
        self.rewrites = []
        if self._config.has_section('develop'):
            for package, value in self._config.items('develop'):
                value = value.lower()
                if value == 'true':
                    self.develop[package] = True
                elif value == 'false':
                    self.develop[package] = False
                elif value == 'auto':
                    self.develop[package] = 'auto'
                else:
                    raise ValueError(
                        "Invalid value in 'develop' section of '%s'"
                        % self.cfg_path)
        if self._config.has_option('buildout', 'args'):
            args = self._config.get('buildout', 'args').split("\n")
            for arg in args:
                arg = arg.strip()
                # strip the repr()-style quoting added by save() below
                if arg.startswith("'") and arg.endswith("'"):
                    arg = arg[1:-1].replace("\\'", "'")
                elif arg.startswith('"') and arg.endswith('"'):
                    arg = arg[1:-1].replace('\\"', '"')
                self.buildout_args.append(arg)
        (self.buildout_options, self.buildout_settings, _) = \
            parse_buildout_args(self.buildout_args[1:])
        if self._config.has_option('mr.developer', 'rewrites'):
            for rewrite in self._config.get('mr.developer',
                                            'rewrites').split('\n'):
                self.rewrites.append(rewrite.split())

    def save(self):
        """Write the current develop/buildout/rewrites state back to disk."""
        self._config.remove_section('develop')
        self._config.add_section('develop')
        for package in sorted(self.develop):
            state = self.develop[package]
            # FIX: the original used `state is 'auto'`, an identity check
            # against a string literal that only works by virtue of CPython
            # interning; compare by value instead
            if state == 'auto':
                self._config.set('develop', package, 'auto')
            elif state is True:
                self._config.set('develop', package, 'true')
            elif state is False:
                self._config.set('develop', package, 'false')
        if not self._config.has_section('buildout'):
            self._config.add_section('buildout')
        options, settings, args = parse_buildout_args(self.buildout_args[1:])
        # don't store the options when a command was in there
        if not len(args):
            self._config.set('buildout', 'args',
                             "\n".join(repr(x) for x in self.buildout_args))
        if not self._config.has_section('mr.developer'):
            self._config.add_section('mr.developer')
        self._config.set('mr.developer', 'rewrites',
                         "\n".join(" ".join(x) for x in self.rewrites))
        # FIX: close the file handle deterministically (the original passed
        # an anonymous open() handle to write() and leaked it)
        with open(self.cfg_path, "w") as f:
            self._config.write(f)
class ParamStore(object):
    """Thread-safe persistent key/value store backed by an INI file.

    Mutations only mark the store dirty; flush() (also attempted from
    __del__) writes pending changes to disk.
    """

    def __init__(self, root_dir, file_name):
        self._lock = Lock()
        with self._lock:
            # make sure the directory for the backing file exists
            if not os.path.isdir(root_dir):
                os.makedirs(root_dir)
            self._path = os.path.join(root_dir, file_name)
            self._dirty = False
            # open config file
            self._config = RawConfigParser()
            self._config.read(self._path)

    def __del__(self):
        # best-effort persistence on GC; callers should still flush() explicitly
        self.flush()

    def flush(self):
        """Write pending changes to the backing file, if any."""
        if not self._dirty:
            return
        with self._lock:
            self._dirty = False
            # FIX: context manager closes the handle even if write() raises;
            # the original opened/closed manually and leaked it on error
            with open(self._path, 'w') as of:
                self._config.write(of)

    def get(self, section, option, default=None):
        """Get a parameter value and return a string.

        If default is specified and section or option are not defined in
        the file, they are created and set to default, which is then the
        return value.
        """
        with self._lock:
            if not self._config.has_option(section, option):
                if default is not None:
                    self._set(section, option, default)
                return default
            return self._config.get(section, option)

    def get_datetime(self, section, option, default=None):
        """Like get(), but parse the stored string with safestrptime."""
        result = self.get(section, option, default)
        if result:
            return safestrptime(result)
        return result

    def set(self, section, option, value):
        """Set option in section to string value."""
        with self._lock:
            self._set(section, option, value)

    def _set(self, section, option, value):
        # caller must already hold self._lock
        if not self._config.has_section(section):
            self._config.add_section(section)
        elif (self._config.has_option(section, option)
                and self._config.get(section, option) == value):
            # value unchanged -- avoid marking the store dirty
            return
        self._config.set(section, option, value)
        self._dirty = True

    def unset(self, section, option):
        """Remove option from section, dropping the section if it empties."""
        with self._lock:
            if not self._config.has_section(section):
                return
            if self._config.has_option(section, option):
                self._config.remove_option(section, option)
                self._dirty = True
            if not self._config.options(section):
                self._config.remove_section(section)
                self._dirty = True
def read_auto_rx_config(filename):
    """ Read an Auto-RX v2 Station Configuration File.

    This function will attempt to parse a configuration file.
    It will also confirm the accessibility of any SDRs specified in the config file.

    Args:
        filename (str): Filename of the configuration file to read.

    Returns:
        dict: The configuration dictionary (including an 'sdr_settings' key
        with per-SDR parameters), or None on any parse/sanity-check failure.
    """
    global global_config
    # Configuration Defaults:
    auto_rx_config = {
        # Log Settings
        'per_sonde_log' : True,
        # Email Settings
        'email_enabled': False,
        'email_smtp_server': 'localhost',
        'email_from': 'sonde@localhost',
        'email_to': None,
        # SDR Settings
        'sdr_fm': 'rtl_fm',
        'sdr_power': 'rtl_power',
        'sdr_quantity': 1,
        # Search Parameters
        'min_freq'      : 400.4,
        'max_freq'      : 404.0,
        'rx_timeout'    : 120,
        'whitelist' : [],
        'blacklist' : [],
        'greylist'  : [],
        # Location Settings
        'station_lat'   : 0.0,
        'station_lon'   : 0.0,
        'station_alt'   : 0.0,
        # Position Filter Settings
        'max_altitude'  : 50000,
        'max_radius_km' : 1000,
        # Habitat Settings
        'habitat_enabled': False,
        'habitat_upload_rate': 30,
        'habitat_uploader_callsign': 'SONDE_AUTO_RX',
        'habitat_uploader_antenna': '1/4-wave',
        'habitat_upload_listener_position': False,
        'habitat_payload_callsign': '<id>',
        # APRS Settings
        'aprs_enabled'  : False,
        'aprs_upload_rate': 30,
        'aprs_user'     : 'N0CALL',
        'aprs_pass'     : '00000',
        'aprs_server'   : 'rotate.aprs2.net',
        'aprs_object_id': '<id>',
        'aprs_custom_comment': 'Radiosonde Auto-RX <freq>',
        'aprs_position_report': False,
        'station_beacon_enabled': False,
        'station_beacon_rate': 30,
        'station_beacon_comment': "radiosonde_auto_rx SondeGate v<version>",
        'station_beacon_icon': '/r',
        # Web Settings,
        'web_port'      : 5000,
        'web_archive_age': 120,
        # Advanced Parameters
        'search_step'   : 800,
        'snr_threshold' : 10,
        'min_distance'  : 1000,
        'dwell_time'    : 10,
        'max_peaks'     : 10,
        'quantization'  : 10000,
        'synchronous_upload' : False,
        'scan_dwell_time' : 20,
        'detect_dwell_time' : 5,
        'scan_delay' : 10,
        'payload_id_valid' : 5,
        # Rotator Settings
        # NOTE(review): the default key here is 'enable_rotator', but the
        # parsing code below sets/reads 'rotator_enabled' -- confirm which
        # key downstream consumers actually use.
        'enable_rotator': False,
        'rotator_update_rate': 30,
        'rotator_hostname': '127.0.0.1',
        'rotator_port'  : 4533,
        'rotation_threshold': 5.0,
        'rotator_homing_enabled': False,
        'rotator_homing_delay': 10,
        'rotator_home_azimuth': 0,
        'rotator_home_elevation': 0,
        # OziExplorer Settings
        'ozi_enabled'   : False,
        'ozi_update_rate': 5,
        'ozi_port'      : 55681,
        'payload_summary_enabled': False,
        'payload_summary_port' : 55672
    }

    sdr_settings = {}#'0':{'ppm':0, 'gain':-1, 'bias': False}}

    try:
        # The defaults dict doubles as the parser's DEFAULT section, so a
        # missing option falls back to the value above where possible.
        config = RawConfigParser(auto_rx_config)
        config.read(filename)

        # Log Settings
        auto_rx_config['per_sonde_log'] = config.getboolean('logging', 'per_sonde_log')

        # Email Settings
        if config.has_option('email', 'email_enabled'):
            try:
                auto_rx_config['email_enabled'] = config.getboolean('email', 'email_enabled')
                auto_rx_config['email_smtp_server'] = config.get('email', 'smtp_server')
                auto_rx_config['email_from'] = config.get('email', 'from')
                auto_rx_config['email_to'] = config.get('email', 'to')
            except:
                # Deliberate broad catch: bad email settings only disable
                # email, they must not abort the whole config load.
                logging.error("Config - Invalid email settings. Disabling.")
                auto_rx_config['email_enabled'] = False

        # SDR Settings
        auto_rx_config['sdr_fm'] = config.get('advanced', 'sdr_fm_path')
        auto_rx_config['sdr_power'] = config.get('advanced', 'sdr_power_path')
        auto_rx_config['sdr_quantity'] = config.getint('sdr', 'sdr_quantity')

        # Search Parameters
        auto_rx_config['min_freq'] = config.getfloat('search_params', 'min_freq')
        auto_rx_config['max_freq'] = config.getfloat('search_params', 'max_freq')
        auto_rx_config['rx_timeout'] = config.getint('search_params', 'rx_timeout')
        # Frequency lists are stored as JSON arrays in the config file.
        auto_rx_config['whitelist'] = json.loads(config.get('search_params', 'whitelist'))
        auto_rx_config['blacklist'] = json.loads(config.get('search_params', 'blacklist'))
        auto_rx_config['greylist'] = json.loads(config.get('search_params', 'greylist'))

        # Location Settings
        auto_rx_config['station_lat'] = config.getfloat('location', 'station_lat')
        auto_rx_config['station_lon'] = config.getfloat('location', 'station_lon')
        auto_rx_config['station_alt'] = config.getfloat('location', 'station_alt')

        # Position Filtering
        auto_rx_config['max_altitude'] = config.getint('filtering', 'max_altitude')
        auto_rx_config['max_radius_km'] = config.getint('filtering', 'max_radius_km')

        # Habitat Settings
        auto_rx_config['habitat_enabled'] = config.getboolean('habitat', 'habitat_enabled')
        auto_rx_config['habitat_upload_rate'] = config.getint('habitat', 'upload_rate')
        auto_rx_config['habitat_payload_callsign'] = config.get('habitat', 'payload_callsign')
        auto_rx_config['habitat_uploader_callsign'] = config.get('habitat', 'uploader_callsign')
        auto_rx_config['habitat_upload_listener_position'] = config.getboolean('habitat','upload_listener_position')

        # APRS Settings
        auto_rx_config['aprs_enabled'] = config.getboolean('aprs', 'aprs_enabled')
        auto_rx_config['aprs_upload_rate'] = config.getint('aprs', 'upload_rate')
        auto_rx_config['aprs_user'] = config.get('aprs', 'aprs_user')
        auto_rx_config['aprs_pass'] = config.get('aprs', 'aprs_pass')
        auto_rx_config['aprs_server'] = config.get('aprs', 'aprs_server')
        auto_rx_config['aprs_object_id'] = config.get('aprs', 'aprs_object_id')
        auto_rx_config['aprs_custom_comment'] = config.get('aprs', 'aprs_custom_comment')

        # OziPlotter Settings
        auto_rx_config['ozi_enabled'] = config.getboolean('oziplotter', 'ozi_enabled')
        auto_rx_config['ozi_update_rate'] = config.getint('oziplotter', 'ozi_update_rate')
        auto_rx_config['ozi_port'] = config.getint('oziplotter', 'ozi_port')
        auto_rx_config['payload_summary_enabled'] = config.getboolean('oziplotter', 'payload_summary_enabled')
        auto_rx_config['payload_summary_port'] = config.getint('oziplotter', 'payload_summary_port')

        # Advanced Settings
        auto_rx_config['search_step'] = config.getfloat('advanced', 'search_step')
        auto_rx_config['snr_threshold'] = config.getfloat('advanced', 'snr_threshold')
        auto_rx_config['min_distance'] = config.getfloat('advanced', 'min_distance')
        auto_rx_config['dwell_time'] = config.getint('advanced', 'dwell_time')
        auto_rx_config['quantization'] = config.getint('advanced', 'quantization')
        auto_rx_config['max_peaks'] = config.getint('advanced', 'max_peaks')
        auto_rx_config['scan_dwell_time'] = config.getint('advanced', 'scan_dwell_time')
        auto_rx_config['detect_dwell_time'] = config.getint('advanced', 'detect_dwell_time')
        auto_rx_config['scan_delay'] = config.getint('advanced', 'scan_delay')
        auto_rx_config['payload_id_valid'] = config.getint('advanced', 'payload_id_valid')
        auto_rx_config['synchronous_upload'] = config.getboolean('advanced', 'synchronous_upload')

        # Rotator Settings
        auto_rx_config['rotator_enabled'] = config.getboolean('rotator','rotator_enabled')
        auto_rx_config['rotator_update_rate'] = config.getint('rotator', 'update_rate')
        auto_rx_config['rotator_hostname'] = config.get('rotator', 'rotator_hostname')
        auto_rx_config['rotator_port'] = config.getint('rotator', 'rotator_port')
        auto_rx_config['rotator_homing_enabled'] = config.getboolean('rotator', 'rotator_homing_enabled')
        auto_rx_config['rotator_home_azimuth'] = config.getfloat('rotator', 'rotator_home_azimuth')
        auto_rx_config['rotator_home_elevation'] = config.getfloat('rotator', 'rotator_home_elevation')

        # New setting in this version (20180616). Keep it in a try-catch to avoid bombing out if the new setting isn't present.
        try:
            auto_rx_config['habitat_uploader_antenna'] = config.get('habitat', 'uploader_antenna').strip()
        except:
            logging.error("Config - Missing uploader_antenna setting. Using default.")
            auto_rx_config['habitat_uploader_antenna'] = '1/4-wave'

        # New settings added in 20180624.
        try:
            auto_rx_config['web_port'] = config.getint('web', 'web_port')
            auto_rx_config['web_archive_age'] = config.getint('web', 'archive_age')
        except:
            logging.error("Config - Missing Web Server settings. Using defaults.")
            auto_rx_config['web_port'] = 5000
            auto_rx_config['web_archive_age'] = 120

        # New setting added in 201810xx (Rotator updates)
        try:
            auto_rx_config['rotator_homing_delay'] = config.getint('rotator', 'rotator_homing_delay')
            auto_rx_config['rotation_threshold'] = config.getfloat('rotator', 'rotation_threshold')
        except:
            logging.error("Config - Missing new rotator settings, using defaults.")

        # New APRS Station Beaconing settings added in 201812xx
        try:
            auto_rx_config['aprs_position_report'] = config.getboolean('aprs','aprs_position_report')
            auto_rx_config['station_beacon_enabled'] = config.getboolean('aprs','station_beacon_enabled')
            auto_rx_config['station_beacon_rate'] = config.getint('aprs', 'station_beacon_rate')
            auto_rx_config['station_beacon_comment'] = config.get('aprs', 'station_beacon_comment')
            auto_rx_config['station_beacon_icon'] = config.get('aprs', 'station_beacon_icon')
            # A beacon at the default (0,0) location would be meaningless;
            # force it off when no station position has been configured.
            if auto_rx_config['station_beacon_enabled'] and auto_rx_config['station_lat']==0.0 and auto_rx_config['station_lon'] == 0.0:
                auto_rx_config['station_beacon_enabled'] = False
                logging.error("Config - Disable APRS Station beacon, as no station lat/lon set.")
        except:
            logging.error("Config - APRS Station Beacon settings missing, using defaults.")

        # Now we attempt to read in the individual SDR parameters.
        auto_rx_config['sdr_settings'] = {}

        for _n in range(1, auto_rx_config['sdr_quantity']+1):
            _section = "sdr_%d" % _n
            try:
                _device_idx = config.get(_section,'device_idx')
                _ppm = config.getint(_section, 'ppm')
                _gain = config.getfloat(_section, 'gain')
                _bias = config.getboolean(_section, 'bias')
                if (auto_rx_config['sdr_quantity'] > 1) and (_device_idx == '0'):
                    logging.critical("Config - SDR Device ID of 0 used with a multi-SDR configuration. Go read the warning in the config file!")
                    return None
                # See if the SDR exists.
                _sdr_valid = rtlsdr_test(_device_idx)
                if _sdr_valid:
                    auto_rx_config['sdr_settings'][_device_idx] = {'ppm':_ppm, 'gain':_gain, 'bias':_bias, 'in_use': False, 'task': None}
                    logging.info('Config - Tested SDR #%s OK' % _device_idx)
                else:
                    logging.warning("Config - SDR #%s invalid." % _device_idx)
            except Exception as e:
                # A bad SDR stanza skips just that SDR, not the whole config.
                logging.error("Config - Error parsing SDR %d config - %s" % (_n,str(e)))
                continue

        # Sanity checks when using more than one SDR
        if (len(auto_rx_config['sdr_settings'].keys()) > 1) and (auto_rx_config['habitat_payload_callsign'] != "<id>"):
            logging.critical("Fixed Habitat Payload callsign used in a multi-SDR configuration. Go read the warnings in the config file!")
            return None

        if (len(auto_rx_config['sdr_settings'].keys()) > 1) and (auto_rx_config['aprs_object_id'] != "<id>"):
            logging.critical("Fixed APRS object ID used in a multi-SDR configuration. Go read the warnings in the config file!")
            return None

        if (len(auto_rx_config['sdr_settings'].keys()) > 1) and (auto_rx_config['rotator_enabled']):
            logging.critical("Rotator enabled in a multi-SDR configuration. Go read the warnings in the config file!")
            return None

        # TODO: Revisit this limitation once the OziPlotter output sub-module is complete.
        if (len(auto_rx_config['sdr_settings'].keys()) > 1) and auto_rx_config['ozi_enabled']:
            logging.critical("Oziplotter output enabled in a multi-SDR configuration.")
            return None

        if len(auto_rx_config['sdr_settings'].keys()) == 0:
            # We have no SDRs to use!!
            logging.error("Config - No working SDRs! Cannot run...")
            return None
        else:
            # Create a global copy of the configuration file at this point
            global_config = copy.deepcopy(auto_rx_config)
            return auto_rx_config

    except:
        # Any unexpected failure above aborts the whole load; the caller
        # must treat a None return as "no usable configuration".
        traceback.print_exc()
        logging.error("Could not parse config file.")
        return None
# Load the on-disk configuration, backfill any missing sections/keys with
# defaults, and rewrite the file only when something actually changed.
config.read(cfg_path)
dirty = False

if not config.has_section('general'):
    config.add_section('general')
    # Change this if you make backwards-incompatible changes to the
    # section and key naming in the config file.
    config.set('general', 'cfg_schema', 1)
    dirty = True

# Fill in any options missing from the 'general' section.
for key, val in DEFAULTS['general'].items():
    if not config.has_option('general', key):
        config.set('general', key, str(val))
        dirty = True

# NOTE(review): modkeys/movemodkeys look unused here -- presumably
# consumed further down the file; verify before removing.
modkeys = config.get('general', 'ModMask').split()
movemodkeys = config.get('general', 'MoveModMask').split()

# Either load the keybindings or use and save the defaults
if config.has_section('keys'):
    keymap = dict(config.items('keys'))
else:
    keymap = DEFAULTS['keys']
    config.add_section('keys')
    for row in keymap.items():
        config.set('keys', row[0], row[1])
    dirty = True

if dirty:
    # Fix: the original used the Python-2-only file() builtin and never
    # closed the handle, so the rewrite could stay unflushed; open() in a
    # 'with' block guarantees the data reaches disk.
    with open(cfg_path, 'wb') as cfg_file:
        config.write(cfg_file)
# All rights reserved.
#
# File coded by: Pavol Juhas
#
# See AUTHORS.txt for a list of people who contributed.
# See LICENSE_DANSE.txt for license information.
#
##############################################################################

"""Definition of __version__, __date__, __gitsha__.
"""

from pkg_resources import resource_filename
from ConfigParser import RawConfigParser

# Read the package version metadata from the bundled version.cfg file.
# The parser defaults below keep the module importable (with empty
# values) when the metadata file has not been generated yet.
_defaults = {'version': '', 'date': '', 'commit': '', 'timestamp': 0}
parser = RawConfigParser(_defaults)
if not parser.read(resource_filename(__name__, 'version.cfg')):
    from warnings import warn
    warn('Package metadata not found, execute "./setup.py egg_info".')

__version__ = parser.get('DEFAULT', 'version')
__date__ = parser.get('DEFAULT', 'date')
__gitsha__ = parser.get('DEFAULT', 'commit')
__timestamp__ = parser.getint('DEFAULT', 'timestamp')

# Drop module-private helpers so they are not exported.
del parser
del _defaults

# End of file
logging.basicConfig(level=logging.DEBUG)

_ver = sys.version_info
is_py2 = (_ver[0] == 2)
is_py3 = (_ver[0] == 3)

if is_py2:
    from ConfigParser import RawConfigParser as ConfigParser
# TODO When wx library supports Python 3, use this code instead:
# if is_py3:
#     from configparser import ConfigParser

# Load script snippets from conf.cfg; fall back to the built-in defaults
# when the file or its options are missing/unreadable.
config = ConfigParser()
try:
    config.read("conf.cfg")
    enable_copy_script = config.get("DEFAULT", "EnableCopyScript")
    select_answer_script_template = config.get("DEFAULT", "SelectAnswerScriptTemplate")
    find_question_regex = config.get("DEFAULT", "FindQuestionRegEx")
    online_question_bank_url = config.get("DEFAULT", "OnlineQuestionBankURL")
except Exception:
    # Fix: was a bare 'except:', which also swallowed SystemExit and
    # KeyboardInterrupt; Exception keeps the best-effort fallback intact.
    # NOTE(review): this branch leaves find_question_regex and
    # online_question_bank_url undefined -- later use would raise
    # NameError; confirm intended behavior and add defaults if needed.
    enable_copy_script = 'document.body.onselectstart="";document.body.oncopy="";document.body.oncut="";document.body.oncontextmenu="";'
    select_answer_script_template = 'var strQuestionIds0="%s";var strQuestionAnswer0="%s";var strQuestionAnsers=strQuestionAnswer0.split(",");var questionIds=strQuestionIds0.split(",");var objRightCount=0;if(strQuestionAnsers.length>0){for(var i=0;i<questionIds.length;i++){if(strQuestionAnsers[i]!="0"){var name="radio_"+questionIds[i];var objs=document.getElementsByName(name);for(var j=0;j<objs.length;j++){if(objs[j].value==strQuestionAnsers[i]){objs[j].checked=true;var span=document.getElementById("correctAnswer_"+questionIds[i]);var span_right=document.getElementById("span_right_"+questionIds[i]);var span_wrong=document.getElementById("span_wrong_"+questionIds[i]);if(span!=null&&span_right!=null&&span_wrong!=null){if(span.innerText==strQuestionAnsers[i]){span_right.style.display="";span_wrong.style.display="none";objRightCount++;}else{span_right.style.display="none";span_wrong.style.display="";}}}}}}}doCommit(1,0);'

__author__ = u"Qinet"
__title__ = u"锦诚网助手"
__version__ = u'4.0.1'
__update__ = u"2016/11/23"

_HAS_FUND_MESSAGE = u"答案仅供参考!\n\n答题完毕后请随机抽取3-5个题目,检查与网上答案是否一致!\n"
_NOT_FIND_MESSAGE = u"未找到题目!\n"u"请确保在 考! 试! 页! 面! 点击一键答题!!! \n\n"u"已知问题:个别Win7系统可能无法解析,请更换电脑后再次尝试!\n"u"温馨提示:Win8/Win10成功率更高哦!\n"
def parse_module_definition(mod_info):
    # Parse each module definition file (INI format) in mod_info, match the
    # databases it names against files found under the module-level
    # data_dir, then dispatch each supported module to run_module().
    # Relies on module-level globals: version, data_dir, run_module.
    print "Parsing Modules..."

    database_names = set()
    for mod_def, mod_data in mod_info.items():
        mod_name = mod_def
        parser = RawConfigParser()
        parser.read(mod_def)
        query_name = parser.get('Query Metadata', 'QUERY_NAME')
        database_name = parser.get('Database Metadata', 'DATABASE').split(',')
        activity = parser.get('Query Metadata', 'ACTIVITY')
        key_timestamp = parser.get('Query Metadata', 'KEY_TIMESTAMP')

        # Collect every database filename referenced by any module.
        for database in database_name:
            database_names.add(database)

        # Pick the SQL query matching the requested version ('yolo' means
        # "any SQL Query section"). The try/except silently skips sections
        # without a QUERY option.
        # NOTE(review): 'database' leaks from the loop above, so only the
        # LAST database name is stored in the module entry -- confirm this
        # is intended for multi-database modules.
        if version == 'yolo':
            for section in parser.sections():
                try:
                    if "SQL Query" in section:
                        sql_query = parser.get(section, 'QUERY')
                        mod_info[mod_def] = [
                            query_name, database, activity, key_timestamp, sql_query
                        ]
                except:
                    pass
        else:
            for section in parser.sections():
                try:
                    if version in section:
                        sql_query = parser.get(section, 'QUERY')
                        mod_info[mod_def] = [
                            query_name, database, activity, key_timestamp, sql_query
                        ]
                except:
                    pass

    print "Parsing: ", len(mod_info), " modules."
    print "Searching for database files..."
    print

    # Walk the data directory and append the full path of every file whose
    # name matches a module's database (appended after the 5 fixed fields).
    for root, dirs, filenames in os.walk(data_dir):
        for f in filenames:
            if f in database_names:
                for mod_def, mod_data in mod_info.items():
                    if mod_data:
                        if mod_data[1] == f:
                            mod_info[mod_def].append(os.path.join(root, f))

    # Run each supported module; entries left falsy were not matched to a
    # query for this data version.
    for mod_def, mod_data in mod_info.items():
        if mod_data:
            print mod_def, ":", len(mod_data) - 5, "databases."
            run_module(mod_def, mod_data[0], mod_data[5:], mod_data[2], mod_data[3], mod_data[4])
            print
        else:
            print mod_def, ": Module not supported for version of data provided."
            print
s = round(size / p, 2) if (s > 0): return '%s %s' % (s, size_name[i]) else: return '0B' if __name__ == '__main__': reload(sys) # sys.setdefaultencoding('utf8') conf = RawConfigParser() conf.read('config.ini') DUT_IP = conf.get('ip', 'Dut_ip1') Bridge_IP = conf.get('Bridge_ip', 'Bridge_IP') Account_password = conf.get('Accountpassword', 'Account_password1') save_profile = os.getcwd() + '//Profile' save_result = os.getcwd() + '//Result//Throughput2g.txt' chrome_path = os.getcwd() + '//chromedriver_win32//chromedriver.exe' chariot_path = os.getcwd() + '/Chariot_tst' driver = webdriver.Chrome(chrome_path)
def _optional_metadata(cfg, option):
    """Return *option* from the [metadata] section, or None when the
    section or option is absent."""
    try:
        return cfg.get('metadata', option)
    except (NoOptionError, NoSectionError):
        return None


def parse_setup_cfg():
    """Read setup.cfg from ROOTDIR and build the setup() metadata dict.

    Exits the process when setup.cfg cannot be read. Required keys
    (name/version/description) raise if missing; everything else is
    optional and filled in via _opt_value / _optional_metadata.
    """
    cfg = RawConfigParser()
    r = cfg.read([os.path.join(ROOTDIR, 'setup.cfg')])
    if len(r) != 1:
        print("Cannot read 'setup.cfg'")
        sys.exit(1)

    # Mandatory fields -- let NoOptionError propagate if absent.
    metadata = dict(
        name=cfg.get('metadata', 'name'),
        version=cfg.get('metadata', 'version'),
        description=cfg.get('metadata', 'description'),
    )

    # Optional simple fields, copied verbatim when present.
    _opt_value(cfg, metadata, 'metadata', 'license')
    _opt_value(cfg, metadata, 'metadata', 'maintainer')
    _opt_value(cfg, metadata, 'metadata', 'maintainer_email')
    _opt_value(cfg, metadata, 'metadata', 'author')
    _opt_value(cfg, metadata, 'metadata', 'author_email')
    _opt_value(cfg, metadata, 'metadata', 'url')
    _opt_value(cfg, metadata, 'metadata', 'download_url')
    _opt_value(cfg, metadata, 'metadata', 'classifiers', _as_lines)
    _opt_value(cfg, metadata, 'metadata', 'platforms', _as_list)
    _opt_value(cfg, metadata, 'metadata', 'packages', _as_list)
    _opt_value(cfg, metadata, 'metadata', 'keywords', _as_list)

    # Requirement lists (PEP-345-style option names).
    v = _optional_metadata(cfg, 'requires-dist')
    if v is not None:
        requires = _as_requires(v)
        if requires:
            metadata['install_requires'] = requires

    v = _optional_metadata(cfg, 'requires-test')
    if v is not None:
        requires = _as_requires(v)
        if requires:
            metadata['tests_require'] = requires

    # Long description is assembled from one or more files.
    v = _optional_metadata(cfg, 'long_description_file')
    if v is not None:
        parts = []
        for nm in v.split():
            # 'rU' = Python 2 universal-newline mode; 'with' guarantees
            # the handle is closed even when read() fails (the original
            # leaked it on error).
            with open(nm, 'rU') as fp:
                parts.append(fp.read())
        metadata['long_description'] = '\n\n'.join(parts)

    v = _optional_metadata(cfg, 'zip-safe')
    if v is not None:
        metadata['zip_safe'] = _as_bool(v)

    v = _optional_metadata(cfg, 'console_scripts')
    if v is not None:
        if 'entry_points' not in metadata:
            metadata['entry_points'] = {}
        metadata['entry_points']['console_scripts'] = v.splitlines()

    # unittest2 backports the modern unittest API to Python <= 2.6.
    if sys.version_info[:2] <= (2, 6):
        try:
            metadata['tests_require'] += ", unittest2"
        except KeyError:
            metadata['tests_require'] = "unittest2"

    return metadata
def upload(self, subdir, filepath):
    # Upload a built package either over HTTP (requests POST) or FTP
    # (winscp on Windows, curl elsewhere), depending on self.build_upload.
    # Credentials are read from ~/.passwd (INI format) in both modes.
    # NOTE(review): when self.mode_test is set, commands are printed
    # instead of executed (FTP mode only guards execution, not the
    # script-file write on Windows).
    if self.build_upload == 'http':
        import requests
        from ConfigParser import RawConfigParser
        config = RawConfigParser()
        config.read(os.path.expanduser("~/.passwd"))
        data = {
            "password" : config.get('cgdo.ru', 'upload_password'),
            "subdir" : subdir,
        }
        # NOTE(review): this file handle is never explicitly closed; it is
        # left to garbage collection after the POST completes.
        files = {
            "file" : open(filepath, "rb"),
        }
        proxies = {}
        if self.use_proxy:
            proxies = {
                "http" : self.use_proxy,
                "https" : self.use_proxy,
            }
        sys.stdout.write("Uploading package '%s' to '%s'...\n" % (filepath, subdir))
        requests.post("http://cgdo.ru/upload", files=files, data=data, proxies=proxies)

    elif self.build_upload == 'ftp':
        from ConfigParser import RawConfigParser
        config = RawConfigParser()
        config.read(os.path.expanduser("~/.passwd"))
        # FTP uploads always go into a date-stamped directory, ignoring
        # the subdir argument.
        now = datetime.datetime.now()
        subdir = now.strftime("%Y%m%d")
        cmd = None
        if sys.platform == 'win32':
            # Drive winscp with a generated batch script.
            # NOTE(review): the script (containing the FTP password) is
            # written to the temp dir and not removed afterwards.
            ftpScriptFilepath = os.path.join(tempfile.gettempdir(), "blender_for_vray_upload.txt")
            with open(ftpScriptFilepath, 'w') as f:
                f.write('option batch abort\n')
                f.write('option confirm off\n')
                f.write('open ftp://%s:%s@%s -rawsettings ProxyMethod=%s ProxyHost=%s ProxyPort=%s\n' % (
                    config.get('nightlies.ftp', 'user'),
                    config.get('nightlies.ftp', 'pass'),
                    config.get('nightlies.ftp', 'host'),
                    config.get('nightlies.ftp', 'proxy_type'),
                    config.get('nightlies.ftp', 'proxy_host'),
                    config.get('nightlies.ftp', 'proxy_port'),
                ))
                f.write('option transfer binary\n')
                f.write('put %s /%s/\n' % (filepath, subdir))
                f.write('exit\n')
                f.write('\n')
            cmd = ['winscp']
            cmd.append('/passive')
            cmd.append('/script="%s"' % ftpScriptFilepath)
            if not self.mode_test:
                os.system(' '.join(cmd))
        else:
            # Non-Windows: upload with curl.
            # NOTE(review): credentials appear on the command line here,
            # visible in the process list -- consider curl's --netrc.
            cmd = ['curl']
            cmd.append('--no-epsv')
            if self.use_proxy:
                cmd.append('--proxy')
                cmd.append(self.use_proxy)
            cmd.append('--user')
            cmd.append('%s:%s' % (
                config.get('nightlies.ftp', 'user'),
                config.get('nightlies.ftp', 'pass'),
            ))
            cmd.append('--upload-file')
            cmd.append(filepath)
            cmd.append('ftp://%s/%s/' % (
                config.get('nightlies.ftp', 'host'),
                subdir,
            ))
            if not self.mode_test:
                subprocess.call(cmd)
        if self.mode_test:
            print(' '.join(cmd))