def __init__(self, langcode=None):
    """Load the language file for *langcode* (or the configured default).

    Falls back to ``en.lang`` when the requested language file is missing.

    :param langcode: language code such as ``'en'``; when ``None`` the
        code is read from ``~/.fusion/fusion.cfg`` ([lang] langcode).
    """
    self.logger = logging.getLogger('lang')
    self.langpath = os.path.join(os.getcwd(), 'lang')
    if langcode is None:
        # No explicit code given: fall back to the user's config file.
        configfile = SafeConfigParser()
        configfile.read(os.path.expanduser('~/.fusion/fusion.cfg'))
        self.langcode = configfile.get('lang', 'langcode')
    else:
        self.langcode = langcode
    fullpath = os.path.join(self.langpath, self.langcode + '.lang')
    if not os.path.isfile(fullpath):
        # warning() is the non-deprecated spelling of warn().
        self.logger.warning('"%s.lang" dosn\'t exist. '
                            '"en.lang" is taken instead' % self.langcode)
        fullpath = os.path.join(self.langpath, 'en.lang')
        if not os.path.isfile(fullpath):
            self.logger.error('Default file dosn\'t exist either...')
    # Read Langfile
    self.langfile = SafeConfigParser()
    self.langfile.read(fullpath)
    self.logger.debug('Instance of lang created')
def get_config(): DEFAULTS = { 'main': { "player": "mplayer -fs {episode} [-sub {subs}]", }, } config = SafeConfigParser() if not os.path.exists(CONFIG_FILE): print "There is no config file. Creating default in %s" % CONFIG_FILE for section, options in DEFAULTS.items(): for section, options in DEFAULTS.items(): if not config.has_section(section): config.add_section(section) for option, value in options.items(): config.set(section, option, value) if not os.path.exists(APP_DATA_DIR): os.makedirs(APP_DATA_DIR) #make .mts dir with open(CONFIG_FILE, 'w') as cfg: config.write(cfg) else: with open(CONFIG_FILE, 'r') as cfg: config.readfp(cfg) return config
def __init__(self, parsed_url):
    """Initialise the Atmos backend from a duplicity URL.

    URL form: ``atmos://host[:port]/path/`` — credentials (uid/secret)
    are looked up per host in ``/etc/duplicity/atmos.ini``.
    """
    duplicity.backend.Backend.__init__(self, parsed_url)
    self.parsed_url = parsed_url
    # URL string: atmos://host/path/
    self.url_string = duplicity.backend.strip_auth_from_url(self.parsed_url)
    self.url_path = '/' + '/'.join(self.url_string.split('/')[3:])
    host = self.url_string.split('/')[2].split(':')[0]
    # Optional explicit port; default to HTTPS (443) when absent.
    # Indexing past the split result is the only expected failure here,
    # so catch IndexError specifically rather than everything.
    try:
        port = self.url_string.split('/')[2].split(':')[1]
    except IndexError:
        port = 443
    parser = SafeConfigParser()
    parser.read('/etc/duplicity/atmos.ini')
    uid = parser.get(host, 'uid')
    secret = parser.get(host, 'secret')
    log.Debug("Parsed URL:" + self.url_string)
    # Init Atmos connection
    self.api = EsuRestApi(host, int(port), uid, secret)
    # Use an explicit directory name.
    if self.url_string[-1] != '/':
        self.url_string += '/'
def run(command, global_options, options, args):
    """Initialize an ECM instance directory.

    Reads ``settings.ini`` from the instance dir, collects static files
    and syncs the database (creating the sqlite db dir first when a
    sqlite engine is configured).

    :param args: positional CLI args; args[0] is the instance directory.
    """
    if not args:
        command.parser.error('Missing instance directory.')
    instance_dir = args[0]
    sqlite_db_dir = ''
    settings_file = os.path.join(instance_dir, 'settings.ini')
    config = SafeConfigParser()
    if not config.read([settings_file]):
        command.parser.error('Settings file "%s" not found.' % settings_file)
    else:
        sqlite_db_dir = config.get('database', 'sqlite_db_dir')
    if not sqlite_db_dir:
        # Default db location when not configured explicitly.
        sqlite_db_dir = os.path.join(instance_dir, 'db')
    ecm_db_engine = config.get('database', 'ecm_engine')
    # run collectstatic
    collect_static_files(instance_dir, options)
    # run syncdb
    if 'sqlite' in ecm_db_engine and not os.path.exists(sqlite_db_dir):
        os.makedirs(sqlite_db_dir)
    init_ecm_db(instance_dir)
    log('')
    log('ECM instance initialized in "%s".' % instance_dir)
    print_load_message(instance_dir, ecm_db_engine)
def _create_configuration(self): """ Create and return ConfigParser object containing the device configurations Return: ConfigParser object containing the configurations """ logging.info("Creating configuration object") config = SafeConfigParser() device_ids = {} for device in self._devices: # lack of model generally means that there was an unused power # cutter socket if not "model" in device: continue if not device["model"] in device_ids: device_ids[device["model"]] = 1 dev_id = device_ids[device["model"]] device_ids[device["model"]] = dev_id + 1 section = device["model"].upper() + "_" + str(dev_id) config.add_section(section) for key in device: config.set(section, key, str(device[key])) return config
def get_config(config_file): """ Return an instance of ConfigParser.SafeConfigParser, loaded with the data in a) $LPBS_HOME/lpbs.cfg b) $HOME/.lpbs.cfg c) the specified config_file """ config = SafeConfigParser() # Defaults config.readfp(DEFAULTS) config_files = [] # $LPBS_HOME/lpbs.cfg if os.environ.has_key('LPBS_HOME'): global_config_file = os.path.join(os.environ['LPBS_HOME'], 'lpbs.cfg') if os.path.isfile(global_config_file): config_files.append(global_config_file) # $HOME/.lpbs.cfg if os.environ.has_key('HOME'): user_config_file = os.path.join(os.environ['HOME'], '.lpbs.cfg') if os.path.isfile(user_config_file): config_files.append(user_config_file) # Specified Config File try: if os.path.isfile(config_file): config_files.append(config_file) except TypeError: pass try: config.read(config_files) except ParsingError, error: print >> sys.stderr, str(error)
class ConfigurationHelper(object):
    """Load ``config.conf`` located next to this module.

    Changes the working directory to the application home so that
    relative paths in the configuration resolve consistently.
    """

    def __init__(self):
        here = os.path.abspath(__file__)
        self.app_home = os.path.dirname(here)
        os.chdir(self.app_home)
        self.config = SafeConfigParser()
        self.config.read(os.path.join(self.app_home, "config.conf"))
def load_config(config_file): config = SafeConfigParser() config.read(config_file) sections = ['Server', 'Application', 'MongoDB'] the_config = {} for section in sections: options = config.options(section) for option in options: try: the_config[option] = config.get(section, option) try_next = True if try_next: try: the_config[option] = str2bool(the_config[option]) try_next = False except: try_next = True if try_next: try: the_config[option] = float(the_config[option]) try_next = False except: try_next = True except: print "Config exception on %s!" % option the_config[option] = None return the_config
def loadConfig():
    """(Re)load the global configuration from the CONFIG file.

    Best-effort: any error while opening/parsing the file is ignored
    and an empty parser is left in place.
    """
    global config
    config = SafeConfigParser()
    try:
        # Context manager closes the handle (the original leaked the
        # file object returned by file()).
        with open(CONFIG) as fp:
            config.readfp(fp)
    except Exception:
        # Deliberate best-effort: a missing/unreadable config leaves
        # the parser empty.
        pass
def load_converters(self, path):
    """Loads converters from conv files in path and populates
    ``self.converters`` and ``self.converter_map`` structures.

    :param path: a glob-bable path like ``/foo/bar/baz/*.conv``
    """
    platform = app.config.get(prefs.APP_PLATFORM)
    groups = glob(path)
    groups.sort()
    for group_definition in groups:
        parser = SafeConfigParser()
        definition_file = open(group_definition)
        try:
            parser.readfp(definition_file)
            defaults = parser.defaults()
            sections = parser.sections()
            group_converters = list()
            for section in sections:
                # Each section of the .conv file describes one converter.
                converter_info = ConverterInfo(section, parser)
                # Keep only converters available on this platform
                # (no platform restriction means "all platforms").
                if ((converter_info.platforms is None
                     or platform in converter_info.platforms)):
                    ident = converter_info.identifier
                    self.converter_map[ident] = converter_info
                    group_converters.append(converter_info)
            group_converters.sort(key=lambda x: x.name)
            # The group's display name comes from the [DEFAULT] section.
            self.converters.append((defaults['name'], group_converters))
        finally:
            definition_file.close()
def remove_gridmap(dn):
    """Remove the grid-map entry whose subject DN equals *dn*.

    Rewrites the gridmap file (path taken from nimbus-setup.conf) via a
    temp file in $NIMBUS_HOME/var, then renames it into place.  Prints
    a warning when the DN is not present.
    """
    nimbus_home = get_nimbus_home()
    configpath = os.path.join(nimbus_home, 'nimbus-setup.conf')
    config = SafeConfigParser()
    if not config.read(configpath):
        raise CLIError('ENIMBUSHOME', "Failed to read config from '%s'. Has Nimbus been configured?" % configpath)
    gmf = config.get('nimbussetup', 'gridmap')
    # The gridmap path in the config may be relative to nimbus_home.
    gmf = os.path.join(nimbus_home, gmf)
    found = False
    f = open(gmf, 'r')
    # mkstemp returns a raw fd; all writes below go through os.write.
    (nf, new_name) = tempfile.mkstemp(dir=nimbus_home+"/var",
                                      prefix="gridmap", text=True)
    for l in f.readlines():
        l = l.strip()
        if l == "":
            continue
        # shlex handles quoted DNs containing spaces.
        a = shlex.split(l)
        if dn == a[0]:
            # Matched entry: drop it from the rewritten file.
            found = True
        else:
            os.write(nf, l)
            os.write(nf, os.linesep)
    if not found:
        print "WARNING! user not found in %s" % (dn)
    os.close(nf)
    f.close()
    # Replace the original file with the rewritten copy.
    os.unlink(gmf)
    os.rename(new_name, gmf)
def load(self):
    """(Re)read the config file and cache the [global] section as a dict.

    Also records the file's mtime so callers can detect changes.
    """
    config_path = self._config_file
    self._changed = os.path.getmtime(config_path)
    parser = ConfigParser()
    parser.read(config_path)
    self.config_dict = dict(parser.items("global"))
def __init__(self, file='C:\plexus\config.ini', part_number="", hwrev="",
             ccode="", operatorID="", mac="", product=""):
    """Initialise the Plexus datalog configuration.

    Reads config.ini (when present) and derives the identifiers used
    for datalogging.  When the config file or the result path is
    missing, logging is disabled via ``self.no_log``.

    NOTE(review): '\p' and '\c' in the default path are not recognised
    escape sequences so the literal happens to work, but a raw string
    would be safer.
    """
    SafeConfigParser.__init__(self)
    if os.path.isfile(file):
        self.no_log = False
    else:
        self.no_log = True
        print "PlexusLog config file not found: %s" % file
    self.file = file
    self.read(file)
    logging.debug("loading: %s " % self.file)
    self.operatorID = operatorID
    self.mac = ''.join(mac)
    # 113-<part>-<hwrev>-<ccode> assembly number convention.
    self.AssemblyNo = "113-%s-%s-%s" % (part_number, hwrev, ccode)
    # get_data() returns the config value or the given fallback.
    self.FixtureID = self.get_data('Datalog', 'FixtureID', 'NoFixture')
    self.SystemID = self.get_data('Datalog', 'SystemID', 'NoSystemID')
    self.Extension = self.get_data('Datalog', 'Extension', 'NoExtension')
    self.EventTyp = self.get_data('Datalog', 'EventTyp', 'NoEventTyp')
    self.ProcessTyp = self.get_data('Datalog', 'ProcessTyp', 'NoProcessTyp')
    # self.TestSystemID = self.get_data('Datalog','TestSystemID','NoTestSystemID')
    self.CustomerID = self.get_data('Datalog', 'CustomerID', 'NoCustomerID')
    self.EcRevLvl = hwrev
    self.product = product
    self.TestSystemID = self.product + self.ProcessTyp + self.SystemID
    # Default log path sits next to the config file.
    default_path = file.replace('config.ini', 'Log')
    self.Result_path = self.get_data('Path', 'Result_path', default_path)
    if os.path.isdir(self.Result_path) == False:
        self.no_log = True
        print "PlexusLog path not found: %s" % self.Result_path
def __init__(self, filename):
    """Create the singleton Config backed by *filename*.

    Creates the parent directory if needed, loads the file, plugs in
    defaults and persists them when anything was added.

    Raises:
        Exception: if a Config instance already exists.
    """
    # Do singleton checking.
    if Config.INSTANCE is not None:
        raise Exception('Singleton error: an instance of Config already exists.')
    # Create the parent dir(s) if they do not exist.
    if not os.path.exists(filename):
        dirname = os.path.dirname(filename)
        if dirname != '' and not os.path.isdir(dirname):
            # makedirs handles nested paths; the original os.mkdir
            # failed on a missing intermediate dir and raised when the
            # dir already existed while the file did not.
            os.makedirs(dirname)
    # Initialize the config by reading in the file and then checking if
    # standard values must be plugged in...
    SafeConfigParser.__init__(self)
    self._filename = filename
    self.read(self._filename)
    self._dirty = False
    self.set_defaults()
    # Write the config file if necessary.
    if self._dirty:
        with open(self._filename, 'wb') as configfile:
            self.write(configfile)
        self._dirty = False
    # Set the singleton pointer.
    Config.INSTANCE = self
def __init__(self, request):
    """Build the layered configuration for the session's user/host.

    Layers (later files override earlier ones): factory config,
    defaults, per-user config (created empty on first use), per-server
    config, system config.  When the session lacks host/username, only
    factory + default configs are read.
    """
    # Note that SafeConfigParser is not a new-style class so we have to
    # explicitly call the __init__ method
    SafeConfigParser.__init__(self)
    try:
        host = request.session['host']
        username = request.session['username']
        user_conf = os.path.join(settings.USERCONFDIR,
                                 '%s@%s.conf' % (username, host))
        if not os.path.isfile(user_conf):
            # Touch the user configuration file
            open(user_conf, 'w').close()
        server_conf = os.path.join(settings.SERVERCONFDIR,
                                   '%s.conf' % host)
        config_files = [settings.FACTORYCONF,
                        settings.DEFAULTCONF,
                        user_conf,
                        server_conf,
                        settings.SYSTEMCONF]
    except KeyError:
        # Anonymous session: fall back to the built-in defaults only.
        config_files = [settings.FACTORYCONF, settings.DEFAULTCONF]
    self.read(config_files)
def checkAppINI(appLocation):
    """Locate and validate application.ini in *appLocation*.

    *appLocation* is either a directory containing application.ini or
    a zip file with application.ini at its root.

    Returns:
        (zipApp, iniparser): the open ZipFile (or None for a
        directory) and the parsed ini; the caller owns/closes zipApp.

    Raises:
        Exception: if application.ini is missing or lacks required
        sections/options.
    """
    if (os.path.isdir(appLocation)):
        zipApp = None
        appINIPath = os.path.join(appLocation, "application.ini")
        if not (os.path.isfile(appINIPath)):
            raise Exception(appINIPath + " does not exist")
        appINI = open(appINIPath)
    elif (zipfile.is_zipfile(appLocation)):
        zipApp = zipfile.ZipFile(appLocation)
        if not ("application.ini" in zipApp.namelist()):
            raise Exception("jar:" + appLocation + "!/application.ini does not exist")
        appINI = zipApp.open("application.ini")
    else:
        raise Exception("appLocation must be a directory containing application.ini or a zip file with application.ini at its root")
    # application.ini verification
    iniparser = SafeConfigParser()
    try:
        iniparser.readfp(appINI)
    finally:
        # The parser holds the data now; close the ini stream (the
        # original leaked it).  zipApp itself stays open for the caller.
        appINI.close()
    if not (iniparser.has_section("App")):
        raise Exception("application.ini must have an App section")
    if not (iniparser.has_section("Gecko")):
        raise Exception("application.ini must have a Gecko section")
    requireINIOption(iniparser, "App", "Name")
    requireINIOption(iniparser, "App", "Version")
    requireINIOption(iniparser, "App", "BuildID")
    requireINIOption(iniparser, "App", "ID")
    requireINIOption(iniparser, "Gecko", "MinVersion")
    return zipApp, iniparser
def command(): if len(sys.argv) != 3: print 'Invalid arguments' print usage sys.exit(1) command = sys.argv[1] config_file = sys.argv[2] if not os.path.isfile(config_file): print 'Invalid Config File' print usage sys.exit(1) config_file = os.path.abspath(config_file) parser = SafeConfigParser() parser.read([config_file]) fileConfig([config_file]) # TODO: This should check for loggin config # and if not present set to sane defaults if parser.has_option('generate:main', 'base'): base = parser.get_option('generate:main', 'base') else: base = os.path.dirname(config_file) if command == 'generate': generate(parser, base) elif command == 'serve': serve(parser, base) else: print 'Invalid Command' print usage sys.exit(1)
def parseConfig(self, cfg, prefix=DEFAULT_OPT_PREFIX, section='DEFAULT'):
    '''Read config settings from a file, config parser object or dict

    @type cfg: basestring / ConfigParser derived type / dict
    @param cfg: configuration file path or ConfigParser type object
    @type prefix: basestring
    @param prefix: prefix for option names e.g. "attributeQuery."
    @type section: basestring
    @param section: configuration file section from which to extract
    parameters.  Ignored when cfg is a plain dict.
    '''
    if isinstance(cfg, basestring):
        # Treat cfg as a file path; expand $VAR references in it.
        cfgFilePath = path.expandvars(cfg)
        # Add a 'here' helper option for setting dir paths in the config
        # file
        hereDir = path.abspath(path.dirname(cfgFilePath))
        _cfg = SafeConfigParser(defaults={'here': hereDir})
        # Make option name reading case sensitive
        _cfg.optionxform = str
        _cfg.read(cfgFilePath)
        items = _cfg.items(section)
    elif isinstance(cfg, ConfigParser):
        items = cfg.items(section)
    elif isinstance(cfg, dict):
        items = cfg.items()
    else:
        raise AttributeError('Expecting basestring, ConfigParser or dict '
                             'type for "cfg" attribute; got %r type' %
                             type(cfg))
    self.__parseFromItems(items, prefix=prefix)
def __init__(self, *args, **kwargs):
    """Initialise the task and derive the data paths from the [basic]
    root configured in ``self.conf``."""
    luigi.Task.__init__(self, *args, **kwargs)
    config = SafeConfigParser()
    config.read(self.conf)
    base = config.get("basic", "root")
    self.external = '%s/data/temp/paper.csv' % base
    self.paper = '%s/data/temp/paper.sf' % base
def set(self, section, key, value):
    """Set *key* in *section*, recording the change in
    ``self._updated`` when the value actually differs from the stored
    one."""
    previous = SafeConfigParser.get(self, section, key)
    if value != previous:
        self._updated[key] = section, key, value
    return SafeConfigParser.set(self, section, key, value)
def configopts(section, prog=None, opts=[]):
    """Collect options for *section* from /etc/dumbo.conf and ~/.dumborc.

    Builds interpolation defaults (prog/user/pwd plus *opts*) and
    returns the section's items as (key, value) pairs, excluding the
    defaults themselves.  Keys are truncated at the first underscore.
    """
    from ConfigParser import SafeConfigParser, NoSectionError
    if prog:
        basename = prog.split('/')[-1]
        prog = basename[:-3] if basename.endswith('.py') else basename
        defaults = {'prog': prog}
    else:
        defaults = {}
    try:
        # Read both env vars BEFORE updating so that a missing PWD
        # leaves 'user' out as well (all-or-nothing, as before).
        user_pwd = (os.environ['USER'], os.environ['PWD'])
        defaults.update(zip(('user', 'pwd'), user_pwd))
    except KeyError:
        pass
    for key, value in opts:
        defaults[key.lower()] = value
    parser = SafeConfigParser(defaults)
    parser.read(['/etc/dumbo.conf', os.environ['HOME'] + '/.dumborc'])
    results = []
    excludes = set(defaults)
    try:
        for key, value in parser.items(section):
            if key.lower() not in excludes:
                results.append((key.split('_', 1)[0], value))
    except NoSectionError:
        pass
    return results
def parse_config(args):
    """Load sprintstats settings.

    Searches a fixed precedence list of config paths (later existing
    entries win), optionally overridden by ``args.config``.  Returns
    the [default] section as a dict, guaranteeing a
    ``default_points`` key.
    """
    settings = {}
    parser = SafeConfigParser()
    config_precedence = [
        '/etc/sprintstats.cfg',
        '/usr/local/etc/sprintstats.cfg',
        '~/.sprintstats.cfg',
        'config.cfg'
    ]
    config_file = None
    for f in config_precedence:
        f = os.path.expanduser(f)
        if os.path.exists(f):
            # Keep overwriting: the LAST existing path wins.
            config_file = f
    if args and args.config:
        if os.path.exists(args.config):
            config_file = args.config
        else:
            print('WARNING: Specified config file {0} not found'.format(
                (os.path.abspath(args.config))))
            if config_file and os.path.exists(config_file):
                # Fall back to the best match from the precedence list.
                print(
                    'Using {0} instead.'.format(os.path.abspath(config_file)))
    if config_file and os.path.exists(config_file):
        parser.read(config_file)
        if parser.has_section('default'):
            settings = dict(parser.items('default'))
    if 'default_points' not in settings:
        settings['default_points'] = 0
    return settings
def read_info_file(zipfile, path, section):
    """Return a dictionary matching the contents of the config file at
    path in zipfile"""
    parser = SafeConfigParser()
    parser.readfp(StringIO(zipfile.read(path)))
    return dict(parser.items(section))
def _read_config(suite):
    """
    Read the config ini file for the test suite `suite` (identified as
    a group in the config)

    Returns a dictionary of configuration values that will be passed
    as environment variables to the test suite.
    """
    if not os.path.isfile(CONFIG_PATH):
        msg = """
        Could not find config file at '{0}'.
        Please set the CONFIG_PATH environment variable
        """.format(CONFIG_PATH)
        _abort(msg)
    config = SafeConfigParser()
    config.read(CONFIG_PATH)
    # One entry per option in the suite's section/group.
    result = {
        key: config.get(suite, key)
        for key in config.options(suite)
    }
    # Validate the required keys
    for key in ['protocol', 'test_host']:
        if key not in result:
            _abort("Missing '{0}' in config file.".format(key))
    return result
def getConfig():
    """ parse config files in ../conf/ """
    parser = SafeConfigParser()
    accesskey_files = glob("../conf/accesskeys/*.conf")
    main_files = glob("../conf/*.conf")
    loaded_plugin = parser.read(accesskey_files)
    loaded_main = parser.read(main_files)
    return loaded_plugin, loaded_main, parser
def _setup_toolbarbox(self):
    """Build the activity toolbar: activity/stop buttons, a help button
    describing usage (listing the special letter buttons from
    config.ini), and an expanding separator."""
    toolbarbox = ToolbarBox()
    self.set_toolbar_box(toolbarbox)
    toolbarbox.toolbar.insert(ActivityToolbarButton(self), -1)
    toolbarbox.toolbar.insert(Gtk.SeparatorToolItem(), -1)
    toolbarbox.toolbar.insert(StopButton(self), -1)
    names = ""
    parser = SafeConfigParser()
    parser.read("config.ini")
    # Collect the display names of the special-letter buttons from the
    # [dic] section of config.ini.
    for data in [("dic", "A"), ("dic", "E"), ("dic", "I"), ("dic", "O"),
                 ("dic", "U"), ("dic", "Y"), ("dic", "G")]:
        names += parser.get(*data) + " "
    names = names[:-1]  # drop the trailing space
    help_button = HelpButton()
    help_button.add_paragraph(_("Para traducir algo, debes escribirlo en la entrada de abajo y luego presionar enter."))
    help_button.add_paragraph(_("También puedes ver las traducciones abajo."))
    help_button.add_paragraph(_("Al precionar uno de los botones (%s), su texto se insertará en la entrada de escritura" % names))
    toolbarbox.toolbar.insert(help_button, 2)
    # Expanding invisible separator pushes later items to the right.
    separator = Gtk.SeparatorToolItem()
    separator.props.draw = False
    separator.set_expand(True)
    toolbarbox.toolbar.insert(separator, 3)
    toolbarbox.show_all()
    toolbarbox.toolbar.show_all()
class Config(object): """use singleton avoid global variables""" __metaclass__ = Singleton SECTION_NAME = 'main' ACTUAL_CONFIG_FILE = None DEFAULT_CONFIG_FILE = abspath(join(dirname(__file__), '../conf/config.ini')) def __init__(self): self.load_config() def load_config(self): config_file = self.__class__.ACTUAL_CONFIG_FILE or self.__class__.DEFAULT_CONFIG_FILE self._cfg = SafeConfigParser() self._cfg.read([config_file, ]) def get(self, option, section=None, value_type=str): return self._cfg._get(section or self.__class__.SECTION_NAME, value_type, option) def __getattr__(self, option): try: return self.get(option) except NoOptionError as e: print str(e) return None
def main():
    '''Convert Cura slicing profiles to Slic3r format (CLI entry).'''
    parser = argparse.ArgumentParser(
        description='''Convert Cura slicing profile to Slic3r''')
    parser.add_argument('cura', metavar='INPUT', type=str, nargs='*',
                        help='Cura input profile file')
    parser.add_argument('-o', '--out', metavar='OUTPUT', dest='slic3r',
                        type=str, nargs='?',
                        help='Slic3r output profile file')
    parser.add_argument('-d', '--dest', dest='dest', type=str,
                        help='Set destination folder, eg. ../dest/')
    args = parser.parse_args()
    curaFiles = args.cura
    slic3rFile = args.slic3r
    dest = args.dest
    for f in curaFiles:
        cura = SafeConfigParser()
        # BUG FIX: read only the current file; the original passed the
        # whole curaFiles list here, re-reading every input on each
        # iteration and merging all profiles together.
        cura.read(f)
        slic3r = convert(cura._sections['profile'])
        srt = collections.OrderedDict(sorted(slic3r.items()))
        # When output is not specified, print profile to stdout
        if slic3rFile is None and dest is None:
            print('# Slic3r profile converted from Cura {0!s}\n'.format(f))
            for k in srt:
                print("{0!s} = {1!s}".format(k, slic3r[k]))
        else:
            if dest is not None:
                slic3rFile = dest + f
            with open(slic3rFile, 'w') as cf:
                cf.write(
                    '# Slic3r profile converted from Cura {0!s}\n'.format(f))
                for k in srt:
                    cf.write("{0!s} = {1!s}\n".format(k, slic3r[k]))
            print('Profile has been converted.')
def process_config(configfile_path):
    """Populate the module-level api_url/api_token globals from the
    [api_settings] section of *configfile_path*.

    Exits the process when either value is empty.
    """
    global api_token
    global api_url
    parser = SafeConfigParser()
    parser.read(configfile_path)
    error_count = 0
    candidate_url = parser.get('api_settings', 'api_url')
    if candidate_url == '':
        LOG.error('Configuration file needs valid api_url')
        error_count += 1
    else:
        api_url = candidate_url
    candidate_token = parser.get('api_settings', 'api_token')
    if candidate_token == "":
        LOG.error('Configuration file needs valid api_token')
        error_count += 1
    else:
        api_token = candidate_token
    if error_count > 0:
        sys.exit()
def get_defaults():
    """Assemble default AWS settings.

    Precedence: config file (~/.zenoss_aws.cfg, sectionless) overrides
    nothing here — the returned dict prefers environment variables over
    config-file values, except 'username' where the config file wins
    over the always-present USER env var.
    """
    from ConfigParser import SafeConfigParser, DEFAULTSECT
    cfg = SafeConfigParser()
    cfg_file = os.path.expanduser("~/.zenoss_aws.cfg")
    if os.path.exists(cfg_file):
        # Fake out SafeConfigParser by writing a default section
        # header, since the user's file is sectionless.
        cfg_contents = "[%s]\n" % DEFAULTSECT
        with open(cfg_file) as f:
            cfg_contents += f.read()
        from cStringIO import StringIO
        cfg.readfp(StringIO(cfg_contents))

    def cfg_get(key, default=None):
        # Helper: config-file value, or the supplied default.
        if cfg.has_option(DEFAULTSECT, key):
            return cfg.get(DEFAULTSECT, key)
        return default
    #
    # Option precedence:
    #
    # Config File -> Environment -> Command-line Options
    #
    # in all cases except for the 'USER' env var, which will always be set so
    # we prefer the config file setting before looking at the USER env var.
    #
    env_get = os.environ.get
    return {
        'access_key': env_get('AWS_ACCESS_KEY', cfg_get('access_key')),
        'secret_key': env_get('AWS_SECRET_KEY', cfg_get('secret_key')),
        'username': env_get('AWS_USERNAME',
                            cfg_get('username', env_get('USER'))),
        'department': env_get('DEPARTMENT', cfg_get('department')),
        'key_name': env_get('AWS_KEY_NAME', cfg_get('key_name')),
    }
def main(argv): # Print out the tool version and blurb. print """ ossams-parser.py http://www.ossams.com Parses security tool output and imports the data to a database, by Adrien de Beaupre. Version 0.09, 16 October 2011, Copyright Intru-Shun.ca Inc. 2011. Usage: ossams-parser.py configfile.conf (default is ossams.conf) """ # Declare some variables global db global dbconnection global projectname global projectid # Listing of currently supported tools: toollist = [ 'acunetix', 'burp', 'grendel', 'nessus', 'netsparker', 'nexposec', 'nikto', 'nmap', 'ratproxy', 'retinac', 'skipfish', 'sslscan', 'w3af', 'wapiti', 'watcher', 'websecurify', 'zap' ] # If there is a program calling argument it should be the conf file to use. if len(sys.argv) == 2: configurationfile = sys.argv[1] else: #Otherwise use the default configuration file ossams.conf configurationfile = 'ossams.conf' # Check to see if the conf file exists. if os.path.isfile(configurationfile): # Use ConfigParser to grab the configuration file options. 
confparser = SafeConfigParser() # Read the configuration file confparser.read(configurationfile) if confparser.has_section('mysql'): # Grab the MySQL database connection options if confparser.has_option('mysql', 'username'): dbuser = confparser.get('mysql', 'username') else: # Use a default if not in the conf file dbuser = '******' if confparser.has_option('mysql', 'password'): dbpasswd = confparser.get('mysql', 'password') else: # Use a default if not in the conf file dbpasswd = 'password' if confparser.has_option('mysql', 'host'): dbhost = confparser.get('mysql', 'host') else: # Use a default if not in the conf file dbhost = 'localhost' if confparser.has_option('mysql', 'port'): dbport = confparser.getint('mysql', 'port') else: # Use a default if not in the conf file dbport = '3306' if confparser.has_option('project', 'database'): dbname = confparser.get('project', 'database') else: # Use a default if not in the conf file dbname = 'ossams' else: # If we don't have MySQL parameters to use exit. 
print "The configuration file does not have the required mysql section values" sys.exit(1) # Get the conf file parameters for parsing if confparser.has_option('files', 'directory'): filedirectory = confparser.get('files', 'directory') if confparser.has_option('files', 'file'): filetoread = confparser.get('files', 'file') if confparser.has_option('files', 'list'): filelist = confparser.get('files', 'list') if confparser.has_option('files', 'extension'): extension = confparser.get('files', 'extension') else: # Use a default if not in the conf file extension = 'xml' if confparser.has_option('project', 'projectid'): projectid = confparser.get('project', 'projectid') else: # Use a default if not in the conf file projectid = "None" if confparser.has_option('project', 'projectname'): projectname = confparser.get('project', 'projectname') else: # Use a default if not in the conf file projectname = "None" if confparser.has_option('project', 'domain'): domain = confparser.get('project', 'domain') else: # Use a default if not in the conf file domain = "default" else: # Exit with a message if the configuration file isn't there. sys.exit("The configuration file does not appear to exist") # Use 'try' to catch database connection exceptions try: # Connect to the database dbconnection = MySQLdb.connect(host=dbhost, port=dbport, user=dbuser, passwd=dbpasswd) # db is the database connection instance db = dbconnection.cursor() # Use the OSSAMS database, or the one from the conf file db.execute("use %s;" % (dbname)) # If the database connection fails print an error and exit. except MySQLdb.Error, e: print "Error %d: %s" % (e.args[0], e.args[1]) sys.exit(1)
target_valence=normalized_sentiment) try: spotify_track_uri = spotify_response['tracks'][0]['uri'] except IndexError: # If the given genre doesn't exist, Spotify returns an empty list of tracks. app.logger.warning( 'No tracks returned from Spotify. Probably unknown genre: "' + genre + '"') abort(400) # ===== Build JSON Response ===== response = {} response[RESPONSE_KEY_POETRY] = poetry response[RESPONSE_KEY_TWEETS_READ_COUNT] = len(tweets_list) response[RESPONSE_KEY_AVG_SENTIMENT] = avg_sentiment response[RESPONSE_KEY_TRACK] = spotify_track_uri response[RESPONSE_KEY_GENRE] = genre return jsonify(response) if __name__ == "__main__": # Read config file conf = SafeConfigParser() conf.read(CONF_FILE_PATH) TWITTER_CONSUMER_KEY = conf.get('Twitter', 'consumer_key') TWITTER_CONSUMER_SECRET = conf.get('Twitter', 'consumer_secret') SPOTIFY_CLIENT_ID = conf.get('Spotify', 'client_id') SPOTIFY_CLIENT_SECRET = conf.get('Spotify', 'client_secret') app.debug = True # Do NOT use debug mode in production, it is not secure. app.run()
def getDecoderConfigInfo():
    """Read the [Decoder] section of config.cfg and print its values.

    Any read/parse error is logged rather than raised.

    NOTE(review): path_version_file is read but never used in this
    function — confirm whether it should be returned or printed.
    """
    parser = SafeConfigParser()
    try:
        parser.read('config.cfg')
        path_version_file = str(parser.get('Decoder', 'PathToVersionTextFile'))
        print parser.get('Decoder', 'VersionStringName')
        print parser.get('Decoder', 'CURFilePath')
        print parser.get('Decoder', 'DefaultCPEID')
        print parser.get('Decoder', 'DefaultOffset')
        print parser.get('Decoder', 'LogstashPath')
    except Exception as e:
        logger.error("Error in reading decoder config %s", e)
and current_time - last_vibration_time > end_seconds): send_appliance_inactive_message() vibrating = current_time - last_vibration_time < 2 threading.Timer(1, heartbeat).start() if len(sys.argv) == 1: print "No config file specified" sys.exit() vibrating = False appliance_active = False last_vibration_time = time.time() start_vibration_time = last_vibration_time config = SafeConfigParser() config.read(sys.argv[1]) sensor_pin = config.getint('main', 'SENSOR_PIN') begin_seconds = config.getint('main', 'SECONDS_TO_START') end_seconds = config.getint('main', 'SECONDS_TO_END') pushbullet_api_key = config.get('pushbullet', 'API_KEY') pushover_user_key = config.get('pushover', 'user_api_key') pushover_app_key = config.get('pushover', 'app_api_key') pushover_device = config.get('pushover', 'device') pushover_sound = config.get('pushover', 'sound') mqtt_hostname = config.get('mqtt', 'mqtt_hostname') mqtt_port = config.get('mqtt', 'mqtt_port') mqtt_topic = config.get('mqtt', 'mqtt_topic') mqtt_username = config.get('mqtt', 'mqtt_username')
'''Add a database''' db = dbServer() db.dbSvr = config.get('environment', 'mysql.host') db.user = config.get('environment', 'mysql.cloud.user') db.passwd = config.get('environment', 'mysql.cloud.passwd') zs.dbSvr = db '''Add some configuration''' [zs.globalConfig.append(cfg) for cfg in getGlobalSettings(config)] ''''add loggers''' testLogger = logger() testLogger.logFolderPath = '/tmp/' zs.logger = testLogger return zs if __name__ == '__main__': parser = OptionParser() parser.add_option('-i', '--input', action='store', default='setup.properties', \ dest='input', help='file containing environment setup information') parser.add_option('-o', '--output', action='store', default='./sandbox.cfg', \ dest='output', help='path where environment json will be generated') (opts, args) = parser.parse_args() cfg_parser = SafeConfigParser() cfg_parser.read(opts.input) cfg = describeResources(cfg_parser) generate_setup_config(cfg, opts.output)
from xml.dom import minidom from httpcomm import HTTPComm from ConfigParser import SafeConfigParser import json from random import randrange import Queue import threading # Import JSON - compatible with Python<v2.6 try: import json except ImportError: import simplejson as json # Config parser pluginConfig = SafeConfigParser() pluginConfig.read(os.path.join(os.path.dirname(__file__), "config.ini")) # Various constants used throughout the script HANDLE = int(sys.argv[1]) ADDON = xbmcaddon.Addon(id=pluginConfig.get('plugin', 'id')) # Plugin constants __plugin__ = ADDON.getAddonInfo('name') __author__ = "Tim C. Steinmetz" __url__ = "http://qualisoft.dk/" __platform__ = "xbmc media center, [LINUX, OS X, WIN32]" __date__ = pluginConfig.get('plugin', 'date') __version__ = ADDON.getAddonInfo('version') """ Thread class used for scraping individual playlists for each channel
def new(config, name):
    """Prepare a job based on config file."""
    cfg = SafeConfigParser()
    cfg.read(config)
    check_paths(cfg.items('path'))
    jobname = make_unique(name or "mlmpr_caffe")
    initial_run = 1
    # Record job state in a dedicated section of the config.
    cfg.add_section("status")
    cfg.set("status", "name", jobname)
    cfg.set("status", "current_run", str(initial_run))
    solver = generate_solver(cfg)
    cfg.set("status", "solver", solver)
    save_config(cfg)
    submit(generate_pbs(cfg))
def __init__(self, verbosity=1, printer=None):
    """Create the wrapper with a fresh parser and an output printer
    (a default Printer at *verbosity* when none is supplied)."""
    self.parser = SafeConfigParser()
    self.printer = printer if printer else Printer(verbosity)
def read_config(): print 'Initialize...' parser = SafeConfigParser() parser.read('config.ini') return parser
class SmartConfig(object):
    """
    Wrapper around SafeConfigParser to provide a nicer API
    """
    def __init__(self, verbosity=1, printer=None):
        # Parser holds the data; printer handles user-facing output.
        self.parser = SafeConfigParser()
        self.printer = printer or Printer(verbosity)

    def __getitem__(self, item):
        """
        Get a section (Section instance) or create it if it doesn't exist.
        """
        if item not in self:
            self.parser.add_section(item)
        return Section(self.parser, item)

    def __contains__(self, item):
        """
        Check if this config has a section
        """
        return self.parser.has_section(item)

    def read_string(self, data):
        """
        Read the configuration from a string
        """
        sio = StringIO(data)
        sio.seek(0)
        self.parser.readfp(sio)

    def readfp(self, fp):
        # Thin delegation to the underlying parser.
        self.parser.readfp(fp)

    def read(self, filepath):
        """
        Read the configuration from a filepath
        """
        self.parser.read(filepath)

    def write(self, filepath):
        """
        Write the configuration to a filepath

        Returns False (writing nothing) when validation fails,
        True on success.
        """
        # NOTE(review): validate() is not defined in this excerpt;
        # presumably declared elsewhere on the class — confirm.
        if not self.validate():
            return False
        with open(filepath, 'w') as fobj:
            self.parser.write(fobj)
        return True

    def write_file(self, fobj):
        # Same as write(), but targets an already-open file object.
        if not self.validate():
            return False
        self.parser.write(fobj)
        return True
def main():
    """Sync organizations from the DB into LDAP.

    CLI: <db_ini> <ldap_ini> [organization_id]; with no organization_id
    all organizations are synced.  --dry-run prints the ldif to stdout
    instead of applying it and requires an explicit organization_id.
    """
    usage = '\n'.join([
        "%prog [options] <db_ini> <ldap_ini> [organization_id]",
        "",
        "If organization_id provided, only that organization",
        "will be synced. If not, all organizations will be",
        "synced."
    ])
    parser = OptionParser(usage=usage)
    parser.add_option("--dry-run",
                      help=("Just print the ldif to stdout, don't apply"
                            " (requires an organization_id)"),
                      action="store_true",
                      default=False,
                      dest="dry_run")
    opts, args = parser.parse_args()
    # Argument-count validation before any file is touched.
    if len(args) < 2:
        parser.error("At least db_ini and ldap_ini required.")
    if len(args) == 2 and opts.dry_run:
        parser.error("--dry-run requires an organization_id")
    if len(args) > 3:
        # NOTE(review): "organzination_id" is a typo in the user-visible
        # message; left untouched here since it is runtime output.
        parser.error("At most one organzination_id can be specified at once.")
    db_ini_fname = args[0]
    ldap_ini_fname = args[1]
    # dry_run is passed down as an output stream (or None to apply).
    dry_run_fil = None
    if opts.dry_run:
        dry_run_fil = sys.stdout
    with open(db_ini_fname, 'rb') as db_ini_fil:
        with open(ldap_ini_fname, 'rb') as ldap_ini_fil:
            db_config_parser = SafeConfigParser()
            db_config_parser.readfp(db_ini_fil)
            db_server = db.open_conn(db_config_parser)
            organization_ids = []
            if len(args) == 3:
                organization_ids.append(args[2])
            else:
                organization_ids = _all_organization_ids(db_server)
            ldap_config_parser = SafeConfigParser()
            ldap_config_parser.readfp(ldap_ini_fil)
            ldap_uri = ldap_config_parser.get('sync', 'uri')
            for organization_id in organization_ids:
                base_dn = organization_dn(organization_id, db_server)
                # Bind DN = configured name + the organization's base DN.
                ldap_auth_name = ','.join(
                    [ldap_config_parser.get('sync', 'name'), base_dn])
                ldap_auth_pass = ldap_config_parser.get('sync', 'pass')
                # Best-effort per organization: a failure is reported and
                # the loop continues with the next organization.
                try:
                    ldap_server = ldap.initialize(ldap_uri)
                    ldap_server.simple_bind_s(ldap_auth_name, ldap_auth_pass)
                    sync_from_config(db_server, ldap_server, organization_id,
                                     dry_run=dry_run_fil)
                except:
                    # NOTE(review): bare except is deliberate best-effort
                    # here, but it also swallows KeyboardInterrupt.
                    print("Sync failed for organization %s" % base_dn)
                    traceback.print_exc(file=sys.stdout)
# NOTE(review): this fragment is truncated at both ends -- it starts inside
# a migration-description dict literal and ends inside an add_option call.
# Indentation of the opening entries is a best guess.
        'url': 'git://github.com/OpenUpgrade/OpenUpgrade.git',
        'branch': '6.0',
        'addons_dir': os.path.join('bin', 'addons'),
        'root_dir': os.path.join('bin'),
        # Server command template; %(db)s and %(config)s are filled later.
        'cmd': 'bin/openerp-server.py --update=all --database=%(db)s '
        '--config=%(config)s --stop-after-init --no-xmlrpc '
        '--no-netrpc',
    },
},
}
config = SafeConfigParser()
parser = OptionParser(
    description='Migrate script for the impatient or lazy. '
    'Makes a copy of your database, downloads the files necessary to migrate '
    'it as requested and runs the migration on the copy (so your original '
    'database will not be touched). While the migration is running only '
    'errors are shown, for a detailed log see ${branch-dir}/migration.log')
parser.add_option("-C", "--config", action="store", type="string",
                  dest="config", help="current openerp config (required)")
parser.add_option(
    "-D", "--database",
class MakefileParser(object):
    """Parses OTB CMakeLists.txt files to reconstruct test command lines.

    Builds a variable environment from SET/STRING/FIND_PATH commands
    (using string.Template for ``$``-substitution) and extracts
    OTB_TEST_APPLICATION entries as runnable (command, validation) pairs.
    NOTE(review): mixes unittest-style helpers (self.fail, self.maxDiff)
    with plain methods -- presumably used as a TestCase mixin; confirm.
    """

    def __init__(self):
        self.maxDiff = None
        self.parser = SafeConfigParser()
        self.parser.read('otbcfg.ini')
        # NOTE(review): the read happens before the existence check, so a
        # missing file surfaces here as this exception rather than earlier.
        if not os.path.exists('otbcfg.ini'):
            raise Exception(
                "OTB_SOURCE_DIR and OTB_BINARY_DIR must be specified in the file otbcfg.ini"
            )
        self.root_dir = self.parser.get('otb', 'checkout_dir')
        if not os.path.exists(self.root_dir):
            raise Exception(
                "Check otbcfg.ini : OTB_SOURCE_DIR and OTB_BINARY_DIR must be specified there"
            )
        self.build_dir = self.parser.get('otb', 'build_dir')
        if not os.path.exists(self.build_dir):
            raise Exception(
                "Check otbcfg.ini : OTB_SOURCE_DIR and OTB_BINARY_DIR must be specified there"
            )
        self.logger = get_OTB_log()

    def test_CMakelists(self):
        """Parse the top-level CMakeLists.txt into a variable dict.

        Seeds the dict with the source/build dirs, resolves FIND_PATH
        entries on disk, and turns SET values containing '$' into
        Templates for later substitution.
        """
        provided = {}
        provided["OTB_SOURCE_DIR"] = self.root_dir
        provided["OTB_BINARY_DIR"] = self.build_dir
        provided["OTB_DATA_LARGEINPUT_ROOT"] = os.path.normpath(
            os.path.join(self.root_dir, "../OTB-Data/Input"))
        try:
            with open(os.path.join(self.root_dir,
                                   "CMakeLists.txt")) as file_input:
                content = file_input.read()
                output = parse(content)
                # FIND_PATH(var file hint...) commands
                defined_paths = [
                    each for each in output if 'Command' in unicode(
                        type(each)) and "FIND_PATH" in each.name
                ]
                the_paths = {
                    key.body[0].contents:
                    [thing.contents for thing in key.body[1:]]
                    for key in defined_paths
                }
                # SET(var value...) commands -> joined string values
                the_sets = [
                    each for each in output if 'Command' in unicode(
                        type(each)) and "SET" in each.name.upper()
                ]
                the_sets = {
                    key.body[0].contents:
                    [thing.contents for thing in key.body[1:]]
                    for key in the_sets
                }
                the_sets = {key: " ".join(the_sets[key]) for key in the_sets}
                # Names produced by STRING(...) commands get lowercased below.
                the_strings = set([
                    each.body[-1].contents for each in output
                    if 'Command' in unicode(type(each))
                    and "STRING" in each.name.upper()
                ])

                def mini_clean(item):
                    # Strip surrounding quotes from single-token values only.
                    if item.startswith('"') and item.endswith(
                            '"') and " " not in item:
                        return item[1:-1]
                    return item

                the_sets = {key: mini_clean(the_sets[key]) for key in the_sets}

                def templatize(item):
                    # Defer '$'-bearing values as Templates for later resolution.
                    if "$" in item:
                        return Template(item)
                    return item

                for key in the_sets:
                    if key in the_strings:
                        the_sets[key] = the_sets[key].lower()
                the_sets = {key: templatize(the_sets[key]) for key in the_sets}
                for path in the_paths:
                    target_file = the_paths[path][1]
                    suggested_paths = []
                    if len(the_paths[path]) > 2:
                        suggested_paths = the_paths[path][2:]
                    # Prefer an actual on-disk hit; fall back to the first
                    # suggested hint that exists after substitution.
                    try:
                        provided[path] = find_file(target_file)
                    except Exception as e:
                        for each in suggested_paths:
                            st = Template(each)
                            pac = os.path.abspath(st.safe_substitute(provided))
                            if os.path.exists(pac):
                                provided[path] = pac
                                break
                resolve_dict(provided, the_sets)
                provided.update(the_sets)
                return provided
        except Exception as e:
            traceback.print_exc()
            self.fail(unicode(e))

    def add_make(self, previous_context, new_file):
        """Fold SET/STRING definitions from new_file into the context dict.

        NOTE(review): mutates and returns previous_context in place.
        """
        input = open(new_file).read()
        output = parse(input)
        apps = [each for each in output if 'Command' in unicode(type(each))]
        setcommands = [each for each in apps if 'SET' in each.name.upper()]
        stringcommands = [
            each for each in apps if 'STRING' in each.name.upper()
        ]
        environment = previous_context

        def mini_clean(item):
            if item.startswith('"') and item.endswith('"') and " " not in item:
                return item[1:-1]
            return item

        new_env = {}
        for command in setcommands:
            key = command.body[0].contents
            ct = " ".join([item.contents for item in command.body[1:]])
            ct = mini_clean(ct)
            if "$" in ct:
                values = Template(ct)
            else:
                values = ct
            new_env[key] = values
        for stringcommand in stringcommands:
            # STRING commands put the destination variable last.
            key = stringcommand.body[-1].contents
            ct = stringcommand.body[-2].contents
            ct = mini_clean(ct.lower())
            if "$" in ct:
                values = LowerTemplate(ct)
            else:
                values = ct
            new_env[key] = values
        resolve_dict(environment, new_env)
        environment.update(new_env)
        return environment

    def get_apps(self, the_makefile, the_dict):
        """Return the OTB_TEST_APPLICATION commands found in the_makefile."""
        input = open(the_makefile).read()
        output = parse(input)
        apps = [each for each in output if 'Command' in unicode(type(each))]
        otb_apps = [
            each for each in apps
            if 'OTB_TEST_APPLICATION' in each.name.upper()
        ]
        return otb_apps

    def get_tests(self, the_makefile, the_dict):
        """Return the ADD_TEST commands found in the_makefile."""
        input = open(the_makefile).read()
        output = parse(input)
        apps = [each for each in output if 'Command' in unicode(type(each))]
        otb_tests = [each for each in apps if 'ADD_TEST' in each.name.upper()]
        return otb_tests

    def get_apps_with_context(self, the_makefile, the_dict):
        """Like get_apps, but pairs each app with its enclosing FOREACH stack.

        Returns a list of (command, context) where context is a snapshot of
        the FOREACH argument lists active at that point.
        """
        input = open(the_makefile).read()
        output = parse(input)

        def is_a_command(item):
            return 'Command' in unicode(type(item))

        appz = []
        context = []
        for each in output:
            if is_a_command(each):
                if 'FOREACH' in each.name and 'ENDFOREACH' not in each.name:
                    args = [item.contents for item in each.body]
                    context.append(args)
                elif 'ENDFOREACH' in each.name:
                    context.pop()
                elif 'OTB_TEST_APPLICATION' in each.name.upper():
                    # context[:] snapshots the stack at this app's position.
                    appz.append((each, context[:]))
        return appz

    def get_name_line(self, the_list, the_dict):
        """Resolve the NAME field of an OTB_TEST_APPLICATION body."""
        items = ('NAME', 'APP', 'OPTIONS', 'TESTENVOPTIONS', 'VALID')
        itemz = [[], [], [], [], []]
        last_index = 0
        # Bucket each argument under the most recent keyword seen.
        for each in the_list:
            if each.contents in items:
                last_index = items.index(each.contents)
            else:
                itemz[last_index].append(each.contents)
        result = itemz[0][0]
        the_string = Template(result).safe_substitute(the_dict)
        if '$' in the_string:
            # Iteratively substitute; unknown keys are logged and blanked
            # so the loop terminates.
            neo_dict = the_dict
            the_string = Template(the_string).safe_substitute(neo_dict)
            while '$' in the_string:
                try:
                    the_string = Template(the_string).substitute(neo_dict)
                except KeyError as e:
                    self.logger.warning("Key %s is not found in makefiles" %
                                        unicode(e))
                    neo_dict[unicode(e)] = ""
        if 'string.Template' in the_string:
            raise Exception("Unexpected toString call in %s" % the_string)
        return the_string

    def get_command_line(self, the_list, the_dict):
        """Build the otbcli_* command line for an OTB_TEST_APPLICATION."""
        items = ('NAME', 'APP', 'OPTIONS', 'TESTENVOPTIONS', 'VALID')
        itemz = [[], [], [], [], []]
        last_index = 0
        for each in the_list:
            if each.contents in items:
                last_index = items.index(each.contents)
            else:
                itemz[last_index].append(each.contents)
        result = []
        result.extend(["otbcli_%s" % each for each in itemz[1]])
        # len == 7 means "otbcli_" with an empty app name appended.
        if len(result[0]) == 7:
            raise Exception("App name is empty!")
        result.extend(itemz[2])
        result.append("-testenv")
        result.extend(itemz[3])
        the_string = Template(" ".join(result)).safe_substitute(the_dict)
        if '$' in the_string:
            neo_dict = the_dict
            the_string = Template(" ".join(result)).safe_substitute(neo_dict)
            while '$' in the_string:
                try:
                    the_string = Template(the_string).substitute(neo_dict)
                except KeyError as e:
                    self.logger.warning("Key %s is not found in makefiles" %
                                        unicode(e))
                    neo_dict[unicode(e)] = ""
        if 'string.Template' in the_string:
            raise Exception("Unexpected toString call in %s" % the_string)
        return the_string

    def get_test(self, the_list, the_dict):
        """Build the otbTestDriver validation line; "" when no VALID args."""
        items = ('NAME', 'APP', 'OPTIONS', 'TESTENVOPTIONS', 'VALID')
        itemz = [[], [], [], [], []]
        last_index = 0
        for each in the_list:
            if each.contents in items:
                last_index = items.index(each.contents)
            else:
                itemz[last_index].append(each.contents)
        result = ["otbTestDriver"]
        result.extend(itemz[4])
        if len(result) == 1:
            return ""
        the_string = Template(" ".join(result)).safe_substitute(the_dict)
        if '$' in the_string:
            neo_dict = the_dict
            the_string = Template(" ".join(result)).safe_substitute(neo_dict)
            while '$' in the_string:
                try:
                    the_string = Template(the_string).substitute(neo_dict)
                except KeyError as e:
                    self.logger.warning("Key %s is not found in makefiles" %
                                        unicode(e))
                    neo_dict[unicode(e)] = ""
        if 'string.Template' in the_string:
            raise Exception("Unexpected toString call in %s" % the_string)
        return the_string

    def test_algos(self):
        """Collect all resolvable tests as {name: (command, validation)}.

        Walks every Testing/Applications CMakeLists.txt, layers contexts
        from intermediate makefiles, and expands FOREACH loops via the
        cartesian product of their value lists.
        """
        tests = {}
        algos_dir = os.path.join(self.root_dir, "Testing/Applications")
        makefiles = find_files("CMakeLists.txt", algos_dir)
        to_be_excluded = os.path.join(self.root_dir,
                                      "Testing/Applications/CMakeLists.txt")
        if to_be_excluded in makefiles:
            makefiles.remove(to_be_excluded)
        # Map each makefile to the chain of parent CMakeLists.txt files
        # between the root and itself.
        resolve_algos = {}
        for makefile in makefiles:
            intermediate_makefiles = []
            path = makefile.split(os.sep)[len(self.root_dir.split(os.sep)):-1]
            for ind in range(len(path)):
                tmp_path = path[:ind + 1]
                tmp_path.append("CMakeLists.txt")
                tmp_path = os.sep.join(tmp_path)
                candidate_makefile = os.path.join(self.root_dir, tmp_path)
                if os.path.exists(candidate_makefile):
                    intermediate_makefiles.append(candidate_makefile)
            resolve_algos[makefile] = intermediate_makefiles
        dict_for_algo = {}
        for makefile in makefiles:
            # NOTE(review): test_CMakelists() re-parses the root file for
            # every makefile -- hoisting it would be a cheap speedup.
            basic = self.test_CMakelists()
            last_context = self.add_make(
                basic,
                os.path.join(self.root_dir,
                             "Testing/Utilities/CMakeLists.txt"))
            for intermediate_makefile in resolve_algos[makefile]:
                last_context = self.add_make(last_context,
                                             intermediate_makefile)
            dict_for_algo[makefile] = last_context
        for makefile in makefiles:
            appz = self.get_apps_with_context(makefile,
                                              dict_for_algo[makefile])
            for app, context in appz:
                if len(context) == 0:
                    # App outside any FOREACH: resolve once.
                    import copy
                    ddi = copy.deepcopy(dict_for_algo[makefile])
                    tk_dict = autoresolve(ddi)
                    tk_dict = autoresolve(tk_dict)
                    name_line = self.get_name_line(app.body, tk_dict)
                    command_line = self.get_command_line(app.body, tk_dict)
                    test_line = self.get_test(app.body, tk_dict)
                    # Skip (and log) entries with unresolved '$' variables.
                    if '$' in test_line or '$' in command_line:
                        if '$' in command_line:
                            self.logger.error(command_line)
                        if '$' in test_line:
                            self.logger.warning(test_line)
                    else:
                        tests[name_line] = (command_line, test_line)
                else:
                    # App inside FOREACH loop(s): expand every combination
                    # of loop-variable values.
                    contexts = {}
                    for iteration in context:
                        key = iteration[0]
                        values = [
                            each[1:-1].lower() for each in iteration[1:]
                        ]
                        contexts[key] = values
                    keyorder = contexts.keys()
                    import itertools
                    pool = [
                        each
                        for each in itertools.product(*contexts.values())
                    ]
                    import copy
                    for poolinstance in pool:
                        neo_dict = copy.deepcopy(dict_for_algo[makefile])
                        zipped = zip(keyorder, poolinstance)
                        for each in zipped:
                            neo_dict[each[0]] = each[1]
                        ak_dict = autoresolve(neo_dict)
                        ak_dict = autoresolve(ak_dict)
                        ak_dict = autoresolve(ak_dict)
                        ddi = ak_dict
                        name_line = self.get_name_line(app.body, ddi)
                        command_line = self.get_command_line(app.body, ddi)
                        test_line = self.get_test(app.body, ddi)
                        # NOTE(review): "'$' not in test_line" is inconsistent
                        # with the analogous branch above ("'$' in test_line")
                        # and looks like a bug -- confirm before changing.
                        if '$' in command_line or '$' not in test_line:
                            if '$' in command_line:
                                self.logger.error(command_line)
                            if '$' in test_line:
                                self.logger.warning(test_line)
                        else:
                            tests[name_line] = (command_line, test_line)
        return tests
#!/usr/bin/python # Modified 23-May-2016 # [email protected] # Parse all voltages and store in /home/pi/LOGGING for later analysis from time import sleep import re import redis from ConfigParser import SafeConfigParser parser = SafeConfigParser() parser.read('/etc/pithermostat.conf') redishost=parser.get('redis','broker') redisport=int(parser.get('redis','port')) redisdb=parser.get('redis','db') redistimeout=float(parser.get('redis','timeout')) redthis=redis.StrictRedis(host=redishost,port=redisport, db=redisdb, socket_timeout=redistimeout) import logging, datetime dt = datetime.datetime.now() logging.basicConfig(filename='/home/pi/LOGGING/voltages_%i_%i_%i.log' %(dt.year, dt.month, dt.day),level=logging.INFO) logging_string="" try: logging_string += ("%s\t" % dt) regex_volt = re.compile(r'^voltage\/(.*)\/sensor$') # Find all the keys matching voltage/*/sensor # For each key find, the sensor value all_voltkeys=(redthis.keys(pattern="voltage/*/sensor"))
#!/usr/bin/env python import MySQLdb import sys import os import argparse from ConfigParser import SafeConfigParser parser = SafeConfigParser() if os.path.isfile('/etc/ansible-hosts/ansible-hosts.conf'): config = '/etc/ansible-hosts/ansible-hosts.conf' else: config = os.path.join(os.path.dirname(__file__), '../conf/ansible-hosts.conf') parser.read(config) dbhost = parser.get('zabbix', 'host') dbuser = parser.get('zabbix', 'user') dbpass = parser.get('zabbix', 'pass') dbname = parser.get('zabbix', 'name') dbvers = parser.get('zabbix', 'version') con = None def connect_to_db(): """Connect to the puppet-dashboard database""" try: con = MySQLdb.connect(host=dbhost, user=dbuser, db=dbname) return con
# function to pull id off hyperlink
#--------------------------------------------
def pluckId(link):
    """Extract the record id from a Sierra API hyperlink string.

    The id is taken as the 8th '/'-separated component of the link; any
    trailing '"}' JSON residue is stripped before returning.

    Raises IndexError if the link has fewer than 8 components.
    """
    # Fixed: the parameter was previously named 'str', shadowing the
    # builtin and then being reassigned inside the function body.
    parts = link.split('/')
    return parts[7].replace('"}', '')
#--------------------------------------------
# Read config file
#--------------------------------------------
from ConfigParser import SafeConfigParser
parser = SafeConfigParser()
parser.read('/home/helper/local_config.cfg')
# Sierra API credentials come from the local config file.
SIERRA_API_HOST = parser.get('sierra', 'SIERRA_API_HOST')
SIERRA_API_KEY = parser.get('sierra', 'SIERRA_API_KEY')
SIERRA_API_KEY_SECRET = parser.get('sierra', 'SIERRA_API_KEY_SECRET')
# Sierra REST endpoints (relative URIs, joined with the host elsewhere).
AUTH_URI = '/iii/sierra-api/v5/token'
VALIDATE_URI = '/iii/sierra-api/v5/items/validate'
ITEMS_URI = '/iii/sierra-api/v5/items/'
class DidelConfig(object):
    """
    A wrapper for Python's ConfigParser which uses dotted paths instead of
    explicit sections ("section.key").  It must be saved with ``.save()``
    to be persistent.

    >>> config = DidelConfig("foo.conf")
    >>> config.set("foo", 45)
    >>> config.get("foo")
    "45"
    >>> config.set("stuff.bar", "hello")
    >>> config.get("stuff.missing")
    None
    >>> config.get("stuff.bar")
    "hello"
    >>> config.save()
    """

    SOURCE_FILE = expanduser('~/.didel.conf')
    SECRET_SECTION = 'secret'
    _default = None

    @classmethod
    def get_default(cls):
        """
        Get the default config object (lazily created singleton).
        """
        if not cls._default:
            cls._default = cls()
        return cls._default

    def __init__(self, filename=SOURCE_FILE):
        self.filename = filename
        self.config = SafeConfigParser()
        # Create the file on first use so load() always has something to read.
        if not isfile(self.filename):
            self.save()
        self.load()

    def load(self):
        """Re-read the config from disk."""
        self.config.read(self.filename)

    def save(self):
        """Write the config to disk and restrict it to owner read/write."""
        with open(self.filename, 'w') as f:
            self.config.write(f)
        # octal numbers are written as '0600' in Python2 and '0o600' in Python3
        # to avoid any problem we're writing the permissions as a decimal
        # number here
        chmod(self.filename, 384)

    def _split_path(self, key):
        """
        Split a key into a ``(section, key)`` tuple; keys without a dot
        land in the parser's DEFAULT section.
        """
        return key.split('.', 1) if '.' in key else ('DEFAULT', key)

    def set(self, key, value, save=False):
        """
        Set a value. It creates the section if it doesn't exist.
        Returns self for chaining.

        >>> config.set("foo.bar", "42")
        """
        section, key = self._split_path(key)
        if not self.config.has_section(section):
            self.config.add_section(section)
        self.config.set(section, key, value)
        if save:
            self.save()
        return self

    def get(self, key, default=None):
        """
        Get a value. It returns ``default`` if it doesn't exist.

        >>> config.get("foo.bar")
        "42"
        >>> config.get("dontexist")
        None
        >>> config.get("dontexist", "foo")
        "foo"
        """
        if not self.has_key(key):
            return default
        section, key = self._split_path(key)
        return self.config.get(section, key)

    def has_key(self, key):
        """
        Test if a key exist
        """
        section, key = self._split_path(key)
        cfg = self.config
        return cfg.has_section(section) and cfg.has_option(section, key)

    def set_secret(self, key, value, save=False):
        """
        Same as ``set`` but use a simple encryption. If no section is
        specified, it uses the secret one.

        NOTE(review): base64.b16encode is an encoding, not encryption --
        it only obfuscates the stored value.
        """
        if '.' not in key:
            key = '%s.%s' % (self.SECRET_SECTION, key)
        return self.set(key, base64.b16encode(value), save)

    def get_secret(self, key):
        """
        Same as ``get`` but use a simple encryption. If no section is
        specified, it uses the secret one.  Returns None if missing.
        """
        if '.' not in key:
            key = '%s.%s' % (self.SECRET_SECTION, key)
        value = self.get(key)
        if value is not None:
            return base64.b16decode(value)

    def has_secret_key(self, key):
        """
        Same as ``has_key`` but for the secret section
        """
        if '.' not in key:
            key = '%s.%s' % (self.SECRET_SECTION, key)
        return self.has_key(key)

    def get_credentials(self):
        """
        Return a pair of "username" and "password" keys. This is roughly the
        same as the following code: ::

            (cfg.get_secret('username'), cfg.get_secret('password'))

        Except that it can try various fallbacks if one of them is ``None``
        (i.e. the credentials weren't properly configured).
        """
        username = self.get_secret('username')
        passwd = self.get_secret('password')
        if username is None or passwd is None:
            return self._get_netrc_credentials()
        return (username, passwd)

    def _get_netrc_credentials(self):
        """
        Fallback of ``get_credentials`` if credentials weren't configured:
        try the user's ~/.netrc for a list of known university hosts.
        Returns (None, None) when nothing matches or the netrc is invalid.
        """
        from netrc import NetrcParseError, netrc as NetrcFile
        source = None
        try:
            source = NetrcFile()
        except NetrcParseError:
            return (None, None)
        hosts = [
            'didel.script.univ-paris-diderot.fr',
            'auth.univ-paris-diderot.fr',
            'univ-paris-diderot.fr',
        ]
        for host in hosts:
            res = source.authenticators(host)
            if res:
                login, _, password = res
                return (login, password)
        return (None, None)

    def items(self):
        """
        Yield all items from this config, as tuples of ``(key, value)``;
        the secret section is skipped.
        """
        for section in self.config.sections():
            if section == self.SECRET_SECTION:
                continue
            for k, v in self.config.items(section):
                yield ('%s.%s' % (section, k), v)
class MSDNAnnotationDialog(QtWidgets.QDialog):
    """Qt configuration dialog for the IDB MSDN annotator.

    Loads/saves settings through a ConfigParser file in the user's IDA
    directory and launches IDB_MSDN_Annotator.main on OK.
    """

    def read_config(self):
        """Return the annotation settings as a plain dict.

        Creates the config sections with defaults when any is missing.
        """
        config = {}
        if not self.config_parser.has_section('Functions') or \
                not self.config_parser.has_section('Arguments') or \
                not self.config_parser.has_section('Constants'):
            # Create default
            self.config_parser.add_section('Functions')
            self.config_parser.add_section('Arguments')
            self.config_parser.add_section('Constants')
            config['functions_annotate'] = True
            config['functions_repeatable_comment'] = False
            config['arguments_annotate'] = True
            config['constants_import'] = True
            config['msdn_data_dir'] = getDefaultMsdnDataDir()
        else:
            # Read existing
            config['functions_annotate'] = self.config_parser.getboolean(
                'Functions', 'annotate')
            config[
                'functions_repeatable_comment'] = self.config_parser.getboolean(
                    'Functions', 'repeatable_comment')
            config['arguments_annotate'] = self.config_parser.getboolean(
                'Arguments', 'annotate')
            config['constants_import'] = self.config_parser.getboolean(
                'Constants', 'import')
            # Older config files may predate the msdn_data_dir option.
            try:
                config['msdn_data_dir'] = self.config_parser.get(
                    'Constants', 'msdn_data_dir')
            except ConfigParser.NoOptionError:
                config['msdn_data_dir'] = getDefaultMsdnDataDir()
        return config

    def save_config(self):
        """Persist the current widget states back to the config file."""
        self.config_parser.set('Functions', 'annotate',
                               str(self.chkFunctionsAnnotate.isChecked()))
        self.config_parser.set('Functions', 'repeatable_comment',
                               str(self.chkFuntcsRepeatable.isChecked()))
        self.config_parser.set('Arguments', 'annotate',
                               str(self.chkArgumentsAnnotate.isChecked()))
        self.config_parser.set('Constants', 'import',
                               str(self.chkConstantsImport.isChecked()))
        self.config_parser.set('Constants', 'msdn_data_dir',
                               str(self.dirText.text()))
        with open(self.file_path, 'wb') as conffile:
            self.config_parser.write(conffile)

    def change_image(self):
        """Swap the preview image to match the three checkbox states.

        Image files are named "<f>-<a>-<c>.png" with each flag as 0/1.
        """
        funct = self.chkFunctionsAnnotate.isChecked() and \
            self.chkFuntcsRepeatable.isChecked()
        image = "{}-{}-{}.png".format(
            int(funct), int(self.chkArgumentsAnnotate.isChecked()),
            int(self.chkConstantsImport.isChecked()))
        img_path = os.path.abspath(
            os.path.join(os.path.dirname(__file__), 'IDB_MSDN_Annotator',
                         'img'))
        self.pic.setPixmap(QtGui.QPixmap(os.path.join(img_path, image)))

    def on_select_dir(self):
        """Let the user pick the MSDN XML database directory."""
        msdnDir = QtWidgets.QFileDialog.getExistingDirectory(
            caption='Select directory containing MSDN XML Database')
        if len(msdnDir) != 0:
            self.dirText.setText(msdnDir)

    def toggle_option(self):
        """Grey out the repeatable-comment option when function
        annotation is off, then refresh the preview image."""
        disable = not self.chkFunctionsAnnotate.isChecked()
        self.chkFuntcsRepeatable.setDisabled(disable)
        self.change_image()

    def on_ok_button(self):
        """Validate the data dir, save settings and run the annotator."""
        #test the msdn data dir
        msdnpath = os.path.join(self.dirText.text(),
                                IDB_MSDN_Annotator.MSDN_INFO_FILE)
        if not os.path.exists(msdnpath):
            g_logger.info('Error - no msdn info file: %s', msdnpath)
            ret = QtWidgets.QMessageBox.warning(
                self, 'MSDN Info Not Found',
                'The file %s was not found in the specified MSDN Data Directory'
                % IDB_MSDN_Annotator.MSDN_INFO_FILE, QtWidgets.QMessageBox.Ok)
            #self.done(QtWidgets.QDialog.Rejected)
            return
        # NOTE(review): the dialog is accepted *before* the config is saved
        # and the annotator runs -- confirm this ordering is intended.
        self.done(QtWidgets.QDialog.Accepted)
        g_logger.info('Saving config')
        self.save_config()
        config = self.read_config()
        idaapi.set_script_timeout(1)
        IDB_MSDN_Annotator.main(config)
        idaapi.set_script_timeout(0)

    def set_form_values(self):
        """Push self.config values into the checkboxes and dir field."""
        # Set values according to configuration file
        if self.config['functions_annotate']:
            self.chkFunctionsAnnotate.setCheckState(QtCore.Qt.Checked)
        if self.config['functions_repeatable_comment']:
            self.chkFuntcsRepeatable.setCheckState(QtCore.Qt.Checked)
        else:
            self.chkFuntcsRepeatable.setDisabled(True)
            self.chkFuntcsRepeatable.setCheckState(QtCore.Qt.Unchecked)
        if self.config['arguments_annotate']:
            self.chkArgumentsAnnotate.setCheckState(QtCore.Qt.Checked)
        if self.config['constants_import']:
            self.chkConstantsImport.setCheckState(QtCore.Qt.Checked)
        self.dirText.setText(self.config['msdn_data_dir'])

    def populate_form(self):
        """Build the dialog's widget tree and wire up its signals."""
        layout = QtWidgets.QVBoxLayout()
        # Functions
        layout1 = QtWidgets.QVBoxLayout()
        groupBox = QtWidgets.QGroupBox('Markup Options')
        self.chkFunctionsAnnotate = QtWidgets.QCheckBox(
            "Annotate function names"
            " (see note)")
        layout1.addWidget(self.chkFunctionsAnnotate)
        self.chkFuntcsRepeatable = QtWidgets.QCheckBox(
            "Use repeatable comments "
            "for function name "
            "annotations")
        layout1.addWidget(self.chkFuntcsRepeatable)
        # Arguments
        self.chkArgumentsAnnotate = QtWidgets.QCheckBox("Annotate function "
                                                        "arguments (see note)")
        layout1.addWidget(self.chkArgumentsAnnotate)
        # Constants
        self.chkConstantsImport = QtWidgets.QCheckBox("Rename constants")
        layout1.addWidget(self.chkConstantsImport)
        groupBox.setLayout(layout1)
        layout.addWidget(groupBox)
        #MSDN data dir
        hlayout = QtWidgets.QHBoxLayout()
        self.selectDirButton = QtWidgets.QPushButton('...')
        self.selectDirButton.clicked.connect(self.on_select_dir)
        hlayout.addWidget(self.selectDirButton)
        self.dirText = QtWidgets.QLineEdit()
        self.dirText.setReadOnly(True)
        hlayout.addWidget(self.dirText)
        groupBox = QtWidgets.QGroupBox('MSDN Data Directory')
        groupBox.setLayout(hlayout)
        layout.addWidget(groupBox)
        # Toggle
        self.chkFunctionsAnnotate.clicked.connect(self.toggle_option)
        self.chkFuntcsRepeatable.clicked.connect(self.change_image)
        self.chkArgumentsAnnotate.clicked.connect(self.change_image)
        self.chkConstantsImport.clicked.connect(self.change_image)
        self.set_form_values()
        info_string = "Note: Annotating functions and/or arguments allows " \
                      "you to hover\nthe respective element in order to " \
                      "show its description."
        layout.addWidget(QtWidgets.QLabel(info_string))
        # Buttons
        button_ok = QtWidgets.QPushButton('&OK')
        button_ok.setDefault(True)
        button_ok.clicked.connect(self.on_ok_button)
        #button_ok.clicked.connect(self.close)
        layout.addWidget(button_ok)
        button_cancel = QtWidgets.QPushButton('&Cancel')
        button_cancel.clicked.connect(self.close)
        layout.addWidget(button_cancel)
        # Image
        self.pic = QtWidgets.QLabel()
        self.pic.setGeometry(0, 0, 663, 203)
        self.change_image()
        # Layout right
        layout_r = QtWidgets.QVBoxLayout()
        #layout_r.addWidget(QtWidgets.QLabel("Annotation preview"))
        layout_r.addWidget(self.pic)
        groupBox = QtWidgets.QGroupBox('Annotation preview')
        groupBox.setLayout(layout_r)
        # Setup layouts
        h_layout = QtWidgets.QHBoxLayout()
        h_layout.addLayout(layout)
        #h_layout.addLayout(layout_r)
        h_layout.addWidget(groupBox)
        self.setLayout(h_layout)

    def __init__(self, parent=None):
        self._logger = logging.getLogger(__name__ + '.' +
                                         self.__class__.__name__)
        self._logger.debug('Starting UI')
        QtWidgets.QDialog.__init__(
            self, parent,
            QtCore.Qt.WindowSystemMenuHint | QtCore.Qt.WindowTitleHint)
        self.setWindowTitle("MSDN Annotations Configuration")
        # Parse configuration file to dictionary
        self.file_path = os.path.abspath(
            os.path.join(idaapi.get_user_idadir(), CONFIG_FILE))
        self.config_parser = SafeConfigParser()
        self.config_parser.read(self.file_path)
        self.config = self.read_config()
        self.populate_form()
def create_argument():
    """Build and parse the sshpt command-line options.

    Resolves the host list from whichever mutually-exclusive source was
    chosen (file, stdin, inline list, INI, JSON), then fills in
    credentials.

    NOTE(review): the credential-handling tail of this function is
    corrupted in the source (scrubbed to '******' placeholders); it is
    preserved verbatim below and must be restored from upstream.
    """
    usage = 'usage: sshpt [options] "[command1]" "[command2]" ...'
    parser = ArgumentParser(usage=usage)
    parser.add_argument('-v', '--version', action='version',
                        version=version.__version__)
    # Exactly one host source must be supplied.
    host_group = parser.add_mutually_exclusive_group(required=True)
    host_group.add_argument(
        "-f", "--file", dest="hostfile", default=None, type=open,
        help="Location of the file containing the host list.")
    host_group.add_argument("-S", "--stdin", dest="stdin", default=False,
                            action="store_true",
                            help="Read hosts from standard input")
    host_group.add_argument(
        "--hosts", dest='hosts', default=None,
        help='Specify a host list on the command line. ex)--hosts="host1:host2:host3"'
    )
    host_group.add_argument(
        "-i", "--ini-file", default=None, nargs=2,
        help="Configuration file with INI Format. ex)--ini-file path, server")
    host_group.add_argument(
        "-j", "--json", default=None, nargs=2,
        help="Configuration file with JSON Format. ex)--json path, server")
    parser.add_argument("-k", "--key-file", dest="keyfile", default=None,
                        metavar="<file>",
                        help="Location of the private key file")
    parser.add_argument(
        "-K", "--key-pass", dest="keypass", metavar="<password>",
        default=None,
        help="The password to be used when use the private key file).")
    parser.add_argument(
        "-o", "--outfile", dest="outfile", default=None, metavar="<file>",
        help="Location of the file where the results will be saved.")
    parser.add_argument(
        "-a", "--authfile", dest="authfile", default=None, metavar="<file>",
        help=
        'Location of the file containing the credentials to be used for connections (format is "username:password").'
    )
    parser.add_argument(
        "-T", "--threads", dest="max_threads", type=int, default=10,
        metavar="<int>",
        help=
        "Number of threads to spawn for simultaneous connection attempts [default: 10]."
    )
    parser.add_argument(
        "-P", "--port", dest="port", type=int, default=22, metavar="<port>",
        help="The port to be used when connecting. Defaults to 22.")
    parser.add_argument(
        "-u", "--username", dest="username", default='root',
        metavar="<username>",
        help=
        "The username to be used when connecting. Defaults to the currently logged-in user."
    )
    parser.add_argument(
        "-p", "--password", dest="password", default=None,
        metavar="<password>",
        help=
        "The password to be used when connecting (not recommended--use an authfile unless the username and password are transient)."
    )
    parser.add_argument(
        "-q", "--quiet", action="store_false", dest="verbose", default=True,
        help="Don't print status messages to stdout (only print errors).")
    parser.add_argument(
        "-d", "--dest", dest="remote_filepath", default="/tmp/",
        metavar="<path>",
        help=
        "Path where the file should be copied on the remote host (default: /tmp/)."
    )
    parser.add_argument(
        "-x", "--execute", action="store_true", dest="execute",
        default=False,
        help="Execute the copied file (just like executing a given command).")
    parser.add_argument(
        "-r", "--remove", action="store_true", dest="remove", default=False,
        help="Remove (clean up) the SFTP'd file after execution.")
    parser.add_argument(
        "-t", "--timeout", dest="timeout", default=30, metavar="<seconds>",
        help=
        "Timeout (in seconds) before giving up on an SSH connection (default: 30)"
    )
    parser.add_argument(
        "-s", "--sudo", nargs="?", action="store", dest="sudo",
        default=False,
        help="Use sudo to execute the command (default: as root).")
    parser.add_argument("-O", "--output-format", dest="output_format",
                        choices=['csv', 'json'], default="csv",
                        help="Ouptut format")
    # Exactly one action: copy a file, or run inline commands.
    action_group = parser.add_mutually_exclusive_group(required=True)
    action_group.add_argument(
        "-c", "--copy-file", dest="local_filepath", default=None,
        metavar="<file>",
        help=
        "Location of the file to copy to and optionally execute (-x) on hosts."
    )
    action_group.add_argument('commands', metavar='Commands', type=str,
                              nargs='*', default=False, help='Commands')
    options = parser.parse_args()
    # Resolve the host list from whichever source was chosen.
    if options.hostfile:
        options.hosts = options.hostfile.read()
    elif options.stdin:
        # if stdin wasn't piped in, prompt the user for it now
        if not select.select([
                sys.stdin,
        ], [], [], 0.0)[0]:
            sys.stdout.write("Enter list of hosts (one entry per line). ")
            sys.stdout.write("Ctrl-D to end input.\n")
        # in either case, read data from stdin
        options.hosts = sys.stdin.read()
    elif options.hosts:
        options.hosts = options.hosts.split(":")
    elif options.ini_file:
        ini_config = SafeConfigParser(allow_no_value=True)
        ini_config.read(options.ini_file[0])
        options.hosts = [
            server[1] for server in ini_config.items(options.ini_file[1])
        ]
        if ini_config.has_section('Commands'):
            # NOTE(review): options.commands is a list at this point, so
            # comparing it to a single option name looks suspicious --
            # confirm against upstream sshpt.
            for command in ini_config.items("Commands"):
                if options.commands == command[0]:
                    options.commands = command[1]
                    break
    elif options.json:
        pass
    if options.authfile:
        credentials = open(options.authfile).readline()
        options.username, options.password = credentials.split(":")
        # Get rid of trailing newline
        # NOTE(review): 'password' is undefined here -- this should almost
        # certainly be options.password.rstrip('\n') (NameError as written).
        options.password = Password(password.rstrip('\n'))
    options.sudo = 'root' if options.sudo is None else options.sudo
    # Get the username and password to use when checking hosts
    # NOTE(review): the remainder was scrubbed in the source; preserved
    # verbatim below (not valid Python as-is):
    if options.username is None:
        options.username = raw_input('Username: '******'Passphrase: ')) elif options.password is None: options.password = Password(getpass.getpass('Password: '******'': print('\nPlease type the password') raise Exception('Please type the password') options.hosts = _normalize_hosts(options.hosts) return options
def __init__(self, filename=SOURCE_FILE):
    # Remember the backing file and start with an empty parser.
    # NOTE(review): SOURCE_FILE, isfile, save and load are defined outside
    # this view.
    self.filename = filename
    self.config = SafeConfigParser()
    # Create the file on first use so load() always has something to read.
    if not isfile(self.filename):
        self.save()
    self.load()
wshedBoundary = arcpy.GetParameterAsText(3) # Bounding Box, as layer bufferDi = arcpy.GetParameterAsText(4) cell_size = arcpy.GetParameterAsText(5) outlet_fullpath = arcpy.GetParameterAsText(6) # as layer again areaThreshold = arcpy.GetParameterAsText( 7) # Threshold for defining stream in km2 path2ssurgoFolders = arcpy.GetParameterAsText(8) path2statsgoFolders = arcpy.GetParameterAsText(9) outCS = arcpy.GetParameterAsText(10) # INPUTS, if script ran as standalone if projDir == "": # initializing ini_fname = "./Onion_simulation.ini" config = SafeConfigParser() config.read(ini_fname) # path to directories path2ssurgoFolders = config.get('directory', 'ssurgo_collection') path2statsgoFolders = config.get('directory', 'statsgo_collection') projDir = config.get('directory', 'projDir') # path to shapefiles outlet_fullpath = config.get('input_files', 'outlet_fullpath') wshedBoundary = config.get('input_files', 'wshedBoundary') # path to other variables areaThreshold = config.get('other_parameter', 'areaThreshold') inUsername = config.get('other_parameter', 'inUsername') inPassword = config.get('other_parameter', 'inPassword')
# network address translation etc. App engine software is at: https://github.com/jamorham/Parakeet-App-Engine parakeet_url = "https://wixel-receiver.appspot.com/receiver.cgi" parakeet_passcode = "12345" # If you wired your wixel directly to the serial pins of the raspberry Pi set this to True # for usb connected wixels leave it set as False use_raspberry_pi_internal_serial_port = False # Or you can store the configuration in a file which overrides whatever is set in this script config = SafeConfigParser({'HOST': HOST, 'PORT': PORT, 'remoteHosts': remoteHosts, 'parakeet_url': parakeet_url, 'parakeet_passcode': parakeet_passcode, 'use_raspberry_pi_internal_serial_port': False, 'DEFAULT_LOG_FILE': DEFAULT_LOG_FILE}) # script should be python-usb-wixel.py and then config file will be python-usb-wixel.cfg config_path = re.sub(r".py$", ".cfg", os.path.realpath(__file__)) if (os.path.isfile(config_path)): config.read(config_path) print "Loading configuration from: " + config_path HOST = config.get('main', 'HOST').strip() PORT = config.getint('main', 'PORT') remoteHosts = config.get('main', 'remoteHosts').strip() parakeet_url = config.get('main', 'parakeet_url').strip() parakeet_passcode = config.get('main', 'parakeet_passcode').strip()
from ConfigParser import SafeConfigParser
import os
from os.path import expanduser
import tempfile

__author__ = 'bubble'

# Module-level parser shared by all accessors below; populated by init().
# NOTE(review): the path '../bin/config.ini' is relative to the process's
# working directory, not to this module's location -- confirm callers run
# from the expected directory.
parser = SafeConfigParser()


def init():
    # Load the configuration; accessors below raise NoSectionError /
    # NoOptionError if init() was not called first (read() silently ignores
    # a missing file).
    parser.read('../bin/config.ini')


def get_remote_zip_url():
    # URL of the remote zip archive ([REMOTE] zip_url).
    return parser.get('REMOTE', 'zip_url')


def get_app_name():
    # Application name ([APP] name).
    return parser.get('APP', 'name')


def get_user_file():
    # Path of the user file ([APP] user_file).
    return parser.get('APP', 'user_file')


def get_certificate_path():
    # Path of the certificate ([APP] certificate).
    return parser.get('APP', 'certificate')


# NOTE(review): definition truncated at the end of this chunk -- its body
# continues outside the visible region.
def get_data_store_name():
import numpy as np
import scipy
import common
from ConfigParser import SafeConfigParser

# Load sampling parameters from the project config.  All .get() values come
# back as strings, hence the explicit int()/float() casts.
# NOTE(review): "../config.ini" is resolved against the current working
# directory -- confirm how this script is launched.
parser = SafeConfigParser()
parser.read("../config.ini")
width = int(parser.get("Sampling", "width"))
height = int(parser.get("Sampling", "height"))
amp_min = float(parser.get("Sampling", "amp_min"))
amp_max = float(parser.get("Sampling", "amp_max"))
rad_min = float(parser.get("Sampling", "rad_min"))
rad_max = float(parser.get("Sampling", "rad_max"))
prefix = parser.get("Misc", "prefix")
location = parser.get("Misc", "location")
output_folder = location + "/" + prefix

# Load the SOM output points: five columns per row (x, y, r, a, L).
x, y, r, a, L = np.loadtxt(output_folder + "/" + prefix + "_out_points_som.txt",
                           unpack=True)
# all_vals has shape (5, n_points): one row per variable, same order as above.
all_vals = np.vstack((x, y, r, a, L))


# NOTE(review): function truncated at the end of this chunk -- only the first
# two statements are visible; the bounds/mask parameters are used off-view.
def find_sources(xlower, xupper, ylower, yupper, mask, varindex):
    vals = all_vals[varindex]  # select the var index we want
    smoothed = common.smooth(vals)
def main():
    """Build per-image 'bags' of local descriptors driven by a config file.

    Reads a configuration file (positional CLI argument) describing a window
    sampler plus any subset of descriptor sections (haar, identity, stats,
    hist, hog, lbp, gabor) and a mandatory [data] section, then for each image
    in the input folder: preprocesses it, restricts to the tissue region, grows
    a descriptor bag and writes one output file per descriptor plus an ROI file.
    """
    p = opt.ArgumentParser(description="""
    Constructs a dictionary for image representation based on a set of specified
    local descriptors. The dictionary is built from a set of images given as a
    list in an input file.
    """)
    p.add_argument('config', action='store', help='a configuration file')
    args = p.parse_args()
    cfg_file = args.config

    parser = SafeConfigParser()
    parser.read(cfg_file)

    #---------
    # sampler: mandatory section; 'type' and 'window_size' are required.
    if not parser.has_section('sampler'):
        raise ValueError('"sampler" section is mandatory')
    if not parser.has_option('sampler', 'type'):
        raise ValueError('"sampler.type" is mandatory')
    tmp = parser.get('sampler', 'type').lower()
    if tmp not in ['random', 'sliding']:
        # NOTE(review): "Unkown" typo in this runtime message -- left as-is
        # here since a doc pass must not change program strings.
        raise ValueError('Unkown sampling type')
    sampler_type = tmp
    if not parser.has_option('sampler', 'window_size'):
        raise ValueError('"sampler.window_size" is mandatory')
    # window_size is given as a Python tuple literal, e.g. "(32, 32)".
    wnd_size = ast.literal_eval(parser.get('sampler', 'window_size'))
    if type(wnd_size) != tuple:
        raise ValueError('"sampler.window_size" specification error')
    it_start = (0, 0)
    it_step = (1, 1)
    if sampler_type == 'sliding':
        # start/step only make sense for the sliding sampler.
        if parser.has_option('sampler', 'start'):
            it_start = ast.literal_eval(parser.get('sampler', 'start'))
        if parser.has_option('sampler', 'step'):
            it_step = ast.literal_eval(parser.get('sampler', 'step'))
    nwindows = parser.getint('sampler', 'nwindows')

    # Each optional section below appends one configured descriptor.
    local_descriptors = []
    #---------
    # haar:
    if parser.has_section('haar'):
        tmp = True
        if parser.has_option('haar', 'norm'):
            tmp = parser.getboolean('haar', 'norm')
        if len(parser.items('haar')) == 0:
            # empty section, use defaults
            h = HaarLikeDescriptor(HaarLikeDescriptor.haars1())
        else:
            # every non-'norm' item is a literal describing one Haar pattern
            h = HaarLikeDescriptor([ast.literal_eval(v)
                                    for n, v in parser.items('haar')
                                    if n.lower() != 'norm'],
                                   _norm=tmp)
        local_descriptors.append(h)
    #---------
    # identity:
    if parser.has_section('identity'):
        local_descriptors.append(IdentityDescriptor())
    #---------
    # stats: collect the requested statistics; empty selection means defaults.
    if parser.has_section('stats'):
        tmp = []
        if parser.has_option('stats', 'mean') and parser.getboolean(
                'stats', 'mean'):
            tmp.append('mean')
        if parser.has_option('stats', 'std') and parser.getboolean(
                'stats', 'std'):
            tmp.append('std')
        if parser.has_option('stats', 'kurtosis') and parser.getboolean(
                'stats', 'kurtosis'):
            tmp.append('kurtosis')
        if parser.has_option('stats', 'skewness') and parser.getboolean(
                'stats', 'skewness'):
            tmp.append('skewness')
        if len(tmp) == 0:
            tmp = None
        local_descriptors.append(StatsDescriptor(tmp))
    #---------
    # hist: histogram over [min_max] with nbins bins.
    if parser.has_section('hist'):
        tmp = (0.0, 1.0)
        tmp2 = 10
        if parser.has_option('hist', 'min_max'):
            tmp = ast.literal_eval(parser.get('hist', 'min_max'))
            if type(tmp) != tuple:
                raise ValueError('"hist.min_max" specification error')
        if parser.has_option('hist', 'nbins'):
            tmp2 = parser.getint('hist', 'nbins')
        local_descriptors.append(HistDescriptor(_interval=tmp, _nbins=tmp2))
    #---------
    # HoG: norient orientations, ppc pixels/cell, cpb cells/block.
    if parser.has_section('hog'):
        tmp = 9
        tmp2 = (128, 128)
        tmp3 = (4, 4)
        if parser.has_option('hog', 'norient'):
            tmp = parser.getint('hog', 'norient')
        if parser.has_option('hog', 'ppc'):
            tmp2 = ast.literal_eval(parser.get('hog', 'ppc'))
            if type(tmp2) != tuple:
                raise ValueError('"hog.ppc" specification error')
        if parser.has_option('hog', 'cpb'):
            tmp3 = ast.literal_eval(parser.get('hog', 'cpb'))
            if type(tmp3) != tuple:
                raise ValueError('"hog.cpb" specification error')
        local_descriptors.append(
            HOGDescriptor(_norient=tmp, _ppc=tmp2, _cpb=tmp3))
    #---------
    # LBP: npoints defaults to 8*radius (also when explicitly set to 0).
    if parser.has_section('lbp'):
        tmp = 3
        tmp2 = 8 * tmp
        tmp3 = 'uniform'
        if parser.has_option('lbp', 'radius'):
            tmp = parser.getint('lbp', 'radius')
        if parser.has_option('lbp', 'npoints'):
            tmp2 = parser.getint('lbp', 'npoints')
        if tmp2 == 0:
            tmp2 = 8 * tmp
        if parser.has_option('lbp', 'method'):
            tmp3 = parser.get('lbp', 'method')
        local_descriptors.append(
            LBPDescriptor(radius=tmp, npoints=tmp2, method=tmp3))
    #---------
    # Gabor: defaults are ndarrays; configured values come back as whatever
    # literal_eval produces (typically tuples/lists) -- the descriptor
    # presumably accepts both; TODO confirm.
    if parser.has_section('gabor'):
        tmp = np.array([0.0, np.pi / 4.0, np.pi / 2.0, 3.0 * np.pi / 4.0],
                       dtype=np.double)
        tmp2 = np.array([3.0 / 4.0, 3.0 / 8.0, 3.0 / 16.0], dtype=np.double)
        tmp3 = np.array([1.0, 2 * np.sqrt(2.0)], dtype=np.double)
        if parser.has_option('gabor', 'theta'):
            tmp = ast.literal_eval(parser.get('gabor', 'theta'))
        if parser.has_option('gabor', 'freq'):
            tmp2 = ast.literal_eval(parser.get('gabor', 'freq'))
        if parser.has_option('gabor', 'sigma'):
            tmp3 = ast.literal_eval(parser.get('gabor', 'sigma'))
        local_descriptors.append(
            GaborDescriptor(theta=tmp, freq=tmp2, sigma=tmp3))

    print('No. of descriptors: ', len(local_descriptors))

    #---------
    # data: mandatory paths for input images and output files.
    if not parser.has_section('data'):
        raise ValueError('Section "data" is mandatory.')
    data_path = parser.get('data', 'input_path')
    img_ext = parser.get('data', 'image_type')
    res_path = parser.get('data', 'output_path')

    img_files = glob.glob(data_path + '/*.' + img_ext)
    if len(img_files) == 0:
        return

    ## Process:
    sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)  # unbuferred output

    for img_name in img_files:
        print("Image: ", img_name, " ...reading... ", end='')
        im = imread(img_name)
        print("preprocessing... ", end='')
        # -preprocessing
        if im.ndim == 3:
            im_h, _, _ = rgb2he2(im)
        else:
            raise ValueError('Input image must be RGB.')

        # detect object region:
        # -try to load a precomputed mask:
        mask_file_name = data_path+'/mask/'+ \
            os.path.splitext(os.path.split(img_name)[1])[0]+ \
            '_tissue_mask.pbm'
        if os.path.exists(mask_file_name):
            print('(loading mask)...', end='')
            mask = imread(mask_file_name)
            mask = img_as_bool(mask)
            mask = remove_small_objects(mask, min_size=500, connectivity=1,
                                        in_place=True)
        else:
            print('(computing mask)...', end='')
            mask, _ = tissue_region_from_rgb(im, _min_area=500)

        # Crop to the mask's bounding box and zero the background, dropping
        # the full-size arrays as early as possible to limit peak memory.
        row_min, col_min, row_max, col_max = bounding_box(mask)
        im_h[np.logical_not(mask)] = 0  # make sure background is 0
        mask = None
        im = None
        im_h = im_h[row_min:row_max + 1, col_min:col_max + 1]

        print("growing the bag...", end='')
        # -image bag growing: the first descriptor creates the bag (including
        # the sampled regions under 'regs'); the rest reuse those regions.
        bag = None  # bag for current image
        for d in local_descriptors:
            if bag is None:
                bag = grow_bag_from_new_image(im_h, d, wnd_size, nwindows,
                                              discard_empty=True)
            else:
                bag[d.name] = grow_bag_with_new_features(im_h, bag['regs'],
                                                         d)[d.name]

        # save the results for each image, one file per descriptor
        # (Python 2: keys() returns a list, so in-place remove() works)
        desc_names = bag.keys()
        desc_names.remove('regs')  # keep all keys but the regions

        # -save the ROI from the original image:
        res_file = res_path + '/' + 'roi-' + \
            os.path.splitext(os.path.split(img_name)[1])[0] + '.dat'
        with open(res_file, 'w') as f:
            f.write('\t'.join(
                [str(x_) for x_ in [row_min, row_max, col_min, col_max]]))

        # One tab-separated file per descriptor: region coords then features.
        for dn in desc_names:
            res_file = res_path + '/' + dn + '_bag-' + \
                os.path.splitext(os.path.split(img_name)[1])[0] + '.dat'
            with open(res_file, 'w') as f:
                n = len(bag[dn])  # total number of descriptors of this type
                for i in range(n):
                    s = '\t'.join([str(x_) for x_ in bag['regs'][i]]) + '\t' + \
                        '\t'.join([str(x_) for x_ in bag[dn][i]]) + '\n'
                    f.write(s)

        print('OK')
        bag = None
        gc.collect()
    gc.collect()
#!/usr/bin/env python # -*- coding: utf-8 -*- import os import urllib import gzip import json import MySQLdb from datetime import datetime from ConfigParser import SafeConfigParser config_file = '../config/config.ini' config = SafeConfigParser(os.environ) config.read(config_file) db_host = config.get('mysql', 'host') db_name = config.get('mysql', 'database') user = config.get('mysql', 'user') password = config.get('mysql', 'password') conn = MySQLdb.connect(host=db_host, user=user, passwd=password, db=db_name) url = config.get('ubike', 'url') # print "downloading with urllib" urllib.urlretrieve(url, "data.gz") f = gzip.open('data.gz', 'r') jdata = f.read() f.close() data = json.loads(jdata) c = conn.cursor() conn.set_character_set('utf8') for key, value in data["retVal"].iteritems():
import csv import smtplib import textwrap import prettytable from email.MIMEMultipart import MIMEMultipart from email.MIMEText import MIMEText from ConfigParser import SafeConfigParser WORKING_PATH = os.path.dirname(sys.argv[0]) try: config_file = WORKING_PATH + "/" + "report.ini" with open(config_file): parser = SafeConfigParser() parser.read(config_file) except IOError: print "Error!, Config File Not Found" raise SystemExit def process_config(section, option): for section_name in parser.sections(): try: if section_name == section: list_items = parser.get(section_name, option) except: list_items = None return list_items
# coding=utf-8 from ConfigParser import SafeConfigParser import json from decimal import Decimal config = SafeConfigParser() Data = None FULL_LIST = ['STR', 'BTC', 'BTS', 'CLAM', 'DOGE', 'DASH', 'LTC', 'MAID', 'XMR', 'XRP', 'ETH', 'FCT'] # This module is the middleman between the bot and a SafeConfigParser object, so that we can add extra functionality # without clogging up lendingbot.py with all the config logic. For example, added a default value to get(). def init(file_location, data=None): global Data Data = data loaded_files = config.read(file_location) if len(loaded_files) != 1: import shutil # Copy default config file if not found try: shutil.copy('default.cfg.example', file_location) print '\ndefault.cfg.example has been copied to ' + file_location + '\n' \ 'Edit it with your API key and custom settings.\n' raw_input("Press Enter to acknowledge and exit...") exit(1) except Exception as ex: ex.message = ex.message if ex.message else str(ex) print("Failed to automatically copy config. Please do so manually. Error: {0}".format(ex.message)) exit(1) return config
def err_log(msg):
    # Log the message with traceback, then terminate the process.
    rs_log.debug(msg, exc_info=True)
    sys.exit()


if __name__ == "__main__":
    server = DispatcherSrv()

    # Look for a .sync config file in the first of these locations that is
    # set in the environment; it may override the HOST/PORT module globals
    # defined above this chunk.
    for loc in ('IDB_PATH', 'USERPROFILE', 'HOME'):
        if loc in os.environ:
            confpath = os.path.join(os.path.realpath(os.environ[loc]), '.sync')
            if os.path.exists(confpath):
                # NOTE(review): PORT is passed as a parser default and later
                # read with getint() -- if PORT is not a string this relies on
                # parser internals; verify against the definitions above.
                config = SafeConfigParser({'host': HOST, 'port': PORT})
                config.read(confpath)
                HOST = config.get("INTERFACE", 'host')
                PORT = config.getint("INTERFACE", 'port')
                server.announcement("configuration file loaded")
                break

    try:
        server.bind(HOST, PORT)
    except Exception as e:
        # err_log() exits the process, so a failed bind is fatal.
        err_log("dispatcher failed to bind on %s:%s\n-> %s" %
                (HOST, PORT, repr(e)))

    # NOTE(review): handler truncated at the end of this chunk -- the except
    # body continues outside the visible region.
    try:
        server.loop()
    except Exception as e:
#parser.add_option('',
#                  '--gdb-url',
#                  default=False,
#                  type='string',
#                  help='url of GraceDB e.g. ')

# `parser` (optparse) is defined above this chunk.
(opts, args) = parser.parse_args()

# Skip GraceDB uploads when no event id was given or the user asked to skip.
# (Fixed `== None` -> `is None`: identity test is the correct idiom for None.)
opts.skip_gracedb_upload = (opts.gracedb_id is None) or opts.skip_gracedb_upload

#=================================================
# read relevant stuff from config file
#=================================================
config = SafeConfigParser()
config.read(opts.config)

ifo = config.get('general', 'ifo')
tag = config.get('general', 'usertag')

realtimedir = config.get('general', 'realtimedir')
gdbdir = config.get('gdb general', 'main_gdb_dir')

if not opts.skip_gracedb_upload:
    # Use the configured GraceDB URL when present, otherwise the client's
    # built-in default server.
    if config.has_option('gdb general', 'gdb_url'):
        gracedb = GraceDb(config.get('gdb general', 'gdb_url'))
    else:
        gracedb = GraceDb()

##########################################