Пример #1
0
def get_config_value(configuration_file, option, verbose=False):
    """Read one option from a section-less (mongodb-style) config file.

    A dummy [dbdat] section header is injected because configparser
    requires at least one section.
    see: https://stackoverflow.com/questions/2819696/parsing-properties-file-in-python/

    Returns the cleaned-up option value, or None when the option is missing.
    """
    config = io.StringIO()
    config.write('[dbdat]\n')
    # Close the source file deterministically instead of leaking the handle.
    with open(configuration_file) as source:
        config.write(source.read())
    config.seek(0, os.SEEK_SET)

    value = None

    # configparser.configparser does not exist; the class is ConfigParser.
    configuration = configparser.ConfigParser()
    try:
        # read_file() replaces the deprecated readfp() (removed in 3.12).
        configuration.read_file(config)

    except configparser.ParsingError as e:
        if verbose:
            print('Ignoring parsing errors:\n' + str(e))

    try:
        value = configuration.get('dbdat', option)

        # clean up required: keep the first whitespace-separated token and
        # strip single quotes.  str.translate(None, ...) was Python 2 only.
        parts = value.split()
        value = parts[0].replace("'", "") if parts else ''

    except configparser.NoOptionError:
        value = None

    return value
Пример #2
0
def load_config(config_file_name):
    """Load winbot settings from an INI file.

    On any read failure a template config is written to disk and the
    process exits so the user can fill in a real token.

    Returns [token, prefix, vm_name, owner_id, channel_id]; the two IDs
    are converted to int.
    """
    config = configparser()
    try:
        # Open explicitly first: ConfigParser.read() silently ignores
        # missing files, so this surfaces a missing config as an error.
        with open(config_file_name, 'r'):
            config.read(config_file_name)
        print("Config loaded successfully!")

    except Exception:
        # Narrowed from a bare 'except:' so SystemExit/KeyboardInterrupt
        # still propagate.
        print(
            "Please add your token and other information to winbot.ini, then run the script again."
        )
        config['winbot'] = {}
        config['winbot']['token'] = 'SetMeUp'
        config['winbot']['prefix'] = 'vm!'
        config['winbot']['vm_name'] = 'Example'
        config['winbot']['owner_id'] = '12345'
        config['winbot']['channel_id'] = '67890'
        with open(config_file_name, 'w') as f:
            config.write(f)
        sys.exit(1)

    if config['winbot']['token'] == 'SetMeUp' or len(
            config['winbot']['token']) < 8:
        print("winbot.ini contains invalid data. Winbot will now terminate.")
        sys.exit(1)
    return_config = []
    return_config.append(config['winbot']['token'])
    return_config.append(config['winbot']['prefix'])
    return_config.append(config['winbot']['vm_name'])
    return_config.append(int(config['winbot']['owner_id']))
    return_config.append(int(config['winbot']['channel_id']))
    return return_config
Пример #3
0
    def __init__(self, path, force=False):
        '''
        init worktree and git dir
        read Configuration file:
        a INI-like file with a single section ([core]) + three fields.

        With force=True the configuration checks are skipped so a fresh
        repository can be created before its config file exists.
        '''
        self.worktree = path
        self.gitdir = os.path.join(path, '.git')

        if not (force or os.path.isdir(self.gitdir)):
            raise Exception('{0:s} is not a Git repository'.format(
                self.worktree))

        # configparser.configparser does not exist; the class is ConfigParser.
        self.conf = configparser.ConfigParser()
        cf = repo_file(self, 'config')
        if cf and os.path.exists(cf):
            # was: self.config.Read([cf]) -- wrong attribute (the parser is
            # self.conf) and wrong method name (read, not Read).
            self.conf.read([cf])
        elif not force:
            raise Exception('Configuration file missing.')

        if not force:
            vers = int(self.conf.get('core', 'repositoryformatversion'))
            # The version check must stay inside this branch: 'vers' was
            # previously referenced unconditionally and raised NameError
            # whenever force=True.  '{0:d}' because vers is an int
            # ('{0:s}' raised ValueError while formatting the message).
            if vers != 0:
                raise Exception(
                    'Unsupported repositoryformatversion: {0:d}'.format(vers))
Пример #4
0
    def do_check(self, configuration_file):
        """Check that the MySQL error log captures access-denied events.

        Sets self.result['level'/'output'] and returns self.result:
        GREEN when log_error is set and log_warnings >= 2, RED otherwise.
        """
        # configparser.configparser does not exist; the class is ConfigParser.
        configuration = configparser.ConfigParser()

        try:
            configuration.read(configuration_file)

        except configparser.ParsingError as e:
            if self.verbose:
                print('Ignoring parsing errors:\n' + str(e))

        try:
            general_log_file = configuration.get('mysqld', 'log_error')
            self.result['level'] = 'GREEN'
            self.result['output'] = 'Error log file enabled.'

            # get() returns a string; the original compared str < int,
            # which raises TypeError on Python 3.
            log_warnings = int(configuration.get('mysqld', 'log_warnings'))

            if log_warnings < 2:
                self.result['level'] = 'RED'
                self.result[
                    'output'] = 'Error log not capturing access denied events.'

        except configparser.NoOptionError:
            self.result['level'] = 'RED'
            self.result['output'] = 'Error log file not enabled.'

        return self.result
Пример #5
0
 def mark(self,
          id,
          duedate,
          description,
          completed,
          done=None,
          delete=None):
     """Mark a task as completed or delete it, then redirect to the index.

     The task id must be a plain hex string; anything else redirects to
     the logoff page (guards against path tricks like '..' or '/').
     """
     username = logon.checkauth()
     taskdir = gettaskdir(username)
     try:
         # Reject any id that is not a bare hex number.
         int(id, 16)
     except ValueError:
         raise cherrypy.InternalRedirect(self.logoffpath)
     taskfile = os.path.join(taskdir, id + '.task')
     if done == "Done":
         parser = configparser()
         with open(taskfile, "r") as handle:
             parser.readfp(handle)
         if completed in ("", "None"):
             completed = date.today().isoformat()
         parser.set('task', 'completed', completed)
         with open(taskfile, "w") as handle:
             parser.write(handle)
     elif delete == "Del":
         os.unlink(taskfile)
     raise cherrypy.InternalRedirect(".")
Пример #6
0
	def index(self):
		"""Render the HTML task-list page for the authenticated user.

		Reads every *.task file in the user's task directory, builds one
		form per task (Done / Del buttons) plus an empty 'add' form, and
		returns the filled base_page template.
		NOTE(review): readfp() is deprecated (removed in Python 3.12) and
		the opened file handle is never closed -- kept as-is here.
		"""
		username = logon.checkauth()
		taskdir = gettaskdir(username)
		tasklist = glob.glob(os.path.join(taskdir,'*.task'))
		tasks = ['<div class="header">Tasklist for user : <span class="highlight">%s</span><form class="logoff" action="%s" method="GET"><button type="submit" name="logoffurl" class="logoff-button" value="/">Log off</button></form></div>'%(username,self.logoffpath),
				'<div class="taskheader"><div class="left">Due date</div><div class="middle">Description</div><div class="right">Completed</div></div>',
				'<div id="items" class="ui-widget-content">']
		for filename in tasklist:
			# Missing options fall back to empty strings / None defaults.
			d = configparser(defaults={'description':'','duedate':'','completed':None})
			id = os.path.splitext(os.path.basename(filename))[0]
			d.readfp(open(filename))
			description = d.get('task','description')
			duedate = d.get('task','duedate')
			completed = d.get('task','completed')
			# CSS class 'done'/'notdone' is derived from the completed field.
			tasks.append('''<form class="%s" action="mark" method="GET">
			<input type="text" class="duedate left" name="duedate" value="%s" readonly="readonly" />
			<input type="text" class="description middle" name="description" value="%s" readonly="readonly" />
			<input type="text" class="completed right editable-date tooltip" title="click to select a date, then click done" name="completed" value="%s" />
			<input type="hidden" name="id" value="%s" />
			<button type="submit" class="done-button" name="done" value="Done" >Done</button>
			<button type="submit" class="del-button" name="delete" value="Del" >Del</button>
			</form>
			'''%('notdone' if completed==None else 'done',duedate,description,completed,id))
		tasks.append('''<form class="add" action="add" method="GET">
			<input type="text" class="duedate left editable-date tooltip" name="duedate" title="click for a date" />
			<input type="text" class="description middle tooltip" title="click to enter a description" name="description"/>
			<button type="submit" class="add-button" name="add" value="Add" >Add</button>
			</form></div>
			''')
		return base_page%('itemlist',"".join(tasks))
Пример #7
0
def get_redis_config():
    """Return (host, port) for redis from conf/config.properties.

    The file is resolved relative to the current working directory.
    """
    # os.path.join builds the path portably; the original
    # '\\'.join([os.getcwd(), "\conf\config.properties"]) produced a
    # doubled backslash and only made sense on Windows.
    config_path = os.path.join(os.getcwd(), "conf", "config.properties")
    # configparser.configparser does not exist; the class is ConfigParser.
    config = configparser.ConfigParser()
    config.read(config_path)
    host = config.get('redis', 'host')
    port = int(config.get('redis', 'port'))
    return host, port
    def do_check(self, configuration_file):
        """Verify the database only listens on localhost.

        GREEN when [httpd] bind_address is 127.0.0.1/localhost or unset
        (the default), YELLOW when bound elsewhere.  Returns self.result.
        """
        # configparser.configparser does not exist; the class is ConfigParser.
        configuration = configparser.ConfigParser()

        try:
            configuration.read(configuration_file)

        except configparser.ParsingError as e:
            if self.verbose:
                print('Ignoring parsing errors:\n' + str(e))

        try:
            bind_address = configuration.get('httpd', 'bind_address')

            if '127.0.0.1' == bind_address or 'localhost' == bind_address:
                self.result['level']  = 'GREEN'
                self.result['output'] = 'Database listening on localhost only. (' + str(bind_address) + ')'
            else:
                self.result['level']  = 'YELLOW'
                self.result['output'] = 'Database listening is not localhost only (' + str(bind_address) + ')'

        except configparser.NoOptionError:
            self.result['level']  = 'GREEN'
            self.result['output'] = 'bind-address option not set, default is 127.0.0.1 or localhost.'

        return self.result
Пример #9
0
def get_statsd_config():
    """Return (host, port, prefix) for statsd from conf/config.properties.

    The file is resolved relative to the current working directory.
    """
    # Build the path portably instead of joining with raw backslashes.
    config_path = os.path.join(os.getcwd(), "conf", "config.properties")
    # configparser.configparser does not exist; the class is ConfigParser.
    config = configparser.ConfigParser()
    config.read(config_path)
    host = config.get('statsd', 'host')
    port = int(config.get('statsd', 'port'))
    prefix = config.get('statsd', 'prefix')
    return host, port, prefix
Пример #10
0
 def __init__(self, configfile = "cluster.ini"):
     """Load the cluster config, defaulting orchestratorType to Mesos.

     Falls back to the [ACS] dnsPrefix value for the [Group] name.
     """
     self.log = ACSLog("ACSUtils")
     self.log.debug("Reading config from " + configfile)
     defaults = {"orchestratorType": "Mesos"}
     # configparser.configparser does not exist; the class is ConfigParser.
     config = configparser.ConfigParser(defaults)
     config.read(configfile)
     if not config.has_option('Group', 'name'):
         # NOTE(review): set() raises NoSectionError when [Group] is absent
         # from the file entirely -- assumed to always exist; confirm.
         config.set('Group', 'name', config.get('ACS', 'dnsPrefix'))
     self.config = config
Пример #11
0
 def __init__(self):
     """Load the first periastron.conf found among the known locations."""
     self.config = configparser()
     cwd_conf = path_join(getcwd(), 'periastron.conf')
     candidates = ['etc/periastron.conf',
                   '/etc/periastron/periastron.conf',
                   '../etc/periastron.conf',
                   cwd_conf]
     for candidate in candidates:
         if not isfile(candidate):
             continue
         print('Using config file {}'.format(candidate))
         self.config.read(candidate)
         break
Пример #12
0
	def add(self,add,description,duedate):
		"""Create a new task file and redirect to the index page.

		The file is named with a fresh random hex uuid and holds a single
		[task] section with description and duedate.  The 'add' parameter
		mirrors the submit button of the HTML form and is unused here.
		"""
		username = logon.checkauth()
		taskdir = gettaskdir(username)
		filename = os.path.join(taskdir,uuid().hex+'.task')
		d=configparser()
		d.add_section('task')
		d.set('task','description',description)
		d.set('task','duedate',duedate)
		with open(filename,"w") as file:
			d.write(file)
		raise cherrypy.InternalRedirect(".")
Пример #13
0
def parseParams_v1(fname):
    """Parse an INI parameter file into a nested dict via paramHandler_v1.

    Each option value is split on spaces and dispatched to the handler
    registered for its section.  Raises KeyError for sections without a
    registered handler.
    """
    paramsDict = {}
    # configparser.configparser does not exist; the class is ConfigParser.
    params = configparser.ConfigParser()
    params.read(fname)
    for section in params.sections():
        paramsDict[section] = {}
        for datum in params.options(section):
            try:
                paramsDict[section].update(paramHandler_v1[section](datum, params.get(section, datum).split(' ')))
            except KeyError:
                # NOTE(review): this also swallows KeyErrors raised inside a
                # handler and reports them as an unknown category.
                raise KeyError('%s not a valid category. Valid categories are: %s' % (section, str(paramHandler_v1.keys())))

    # NOTE(review): assumes a 'Static Mods' section was present in the file;
    # a file without one raises KeyError here.
    paramsDict['Static AAs'] = [entry[1] for entry in paramsDict['Static Mods']]
    return paramsDict
Пример #14
0
	def mark(self,id,duedate,description,completed,done=None,delete=None):
		"""Mark a task as done or delete it, then redirect to the index.

		done=="Done": stamp 'completed' (today's date if empty) into the
		.task file; delete=="Del": remove the file.
		NOTE(review): 'id' is used in the path without hex validation here,
		unlike the other mark() variants in this collection.
		"""
		username = logon.checkauth()
		taskdir = gettaskdir(username)
		filename = os.path.join(taskdir,id+'.task')
		if done=="Done":
			# Debug trace of the submitted form values.
			print('####',id,duedate,description,completed,done,delete)
			d=configparser()
			with open(filename,"r") as file:
				d.readfp(file)  # readfp is deprecated (removed in Python 3.12)
			if completed == ""  or completed == "None": completed = date.today().isoformat()
			d.set('task','completed',completed)
			with open(filename,"w") as file:
				d.write(file)
		elif delete=="Del":
			os.unlink(filename)
		raise cherrypy.InternalRedirect(".")
Пример #15
0
def read_worker_state():
    """Read state.config and return (pid, workspace_id, resource_running_version).

    Returns None when the state file does not exist.  Missing sections or
    options are logged and re-raised as configparser.Error.
    """
    if os.path.isfile(WORKER_STATE_FILE_PATH):
        # configparser.configparser does not exist; the class is ConfigParser.
        state = configparser.ConfigParser()
        try:
            state.read(WORKER_STATE_FILE_PATH)
            pid = state.get(STATE_SECTION, PID)
            workspace_id = state.get(STATE_SECTION, WORKSPACE_ID)
            resource_running_version = state.get(STATE_SECTION,
                                                 DSC_RESOURCE_VERSION)
        # 'except X, e' is Python 2 syntax (a SyntaxError on Python 3) and
        # BaseException.message no longer exists; use 'as' plus str().
        except (configparser.NoSectionError,
                configparser.NoOptionError) as exception:
            log(DEBUG, str(exception))
            raise configparser.Error(str(exception))
        # The original never returned the values its comment promised.
        return pid, workspace_id, resource_running_version
    def do_check(self, configuration_file):
        """Scan the MySQL error log for access-denied events for self.appuser.

        GREEN: none found; YELLOW: fewer than five (or log unreadable);
        RED: five or more, or no error log configured.  Returns self.result.
        """
        # configparser.configparser does not exist; the class is ConfigParser.
        configuration = configparser.ConfigParser()
        count = 0

        try:
            configuration.read(configuration_file)

        except configparser.ParsingError as e:
            if self.verbose:
                print('Ignoring parsing errors:\n' + str(e))

        try:
            # was: configuration.get(...)[0], which kept only the FIRST
            # CHARACTER of the path and made the isfile() check always fail.
            error_log_file = configuration.get('mysqld', 'log_error')

            if os.path.isfile(str(error_log_file)):
                with open(str(error_log_file), 'r') as log:
                    for line in log:
                        if "Access denied for user '" + self.appuser + "'" in line:
                            count += 1

                if 0 == count:
                    self.result['level'] = 'GREEN'
                    self.result[
                        'output'] = 'No access denied events found for user.'

                elif count < 5:
                    self.result['level'] = 'YELLOW'
                    self.result[
                        'output'] = 'Access denied events found for user.'

                else:
                    self.result['level'] = 'RED'
                    self.result[
                        'output'] = 'Excessive access denied events found for user.'

            else:
                self.result['level'] = 'YELLOW'
                self.result[
                    'output'] = 'Could not access error log file ' + str(
                        error_log_file) + '.'

        except configparser.NoOptionError:
            self.result['level'] = 'RED'
            self.result['output'] = 'Error log file not enabled.'

        return self.result
    def do_check(self, configuration_file):
        """Check whether a MySQL general log file is configured.

        Sets YELLOW when [mysqld] general_log_file is absent.  When the
        option exists, self.result is returned unchanged -- NOTE(review):
        no GREEN level is assigned on success, unlike the sibling checks;
        presumably self.result is pre-set by the caller; confirm.
        """
        # configparser.configparser does not exist; the class is ConfigParser.
        configuration = configparser.ConfigParser()

        try:
            configuration.read(configuration_file)

        except configparser.ParsingError as e:
            if self.verbose:
                print('Ignoring parsing errors:\n' + str(e))

        try:
            general_log_file = configuration.get('mysqld', 'general_log_file')

        except configparser.NoOptionError:
            self.result['level']  = 'YELLOW'
            self.result['output'] = 'General log file not enabled.'

        return self.result
Пример #18
0
	def mark(self,id,duedate,description,completed,done=None,delete=None):
		"""Mark a task as done or delete it, then redirect to the index.

		The id must parse as a plain hex integer; otherwise the user is
		redirected to the logoff page (guards against '..' or '/' tricks).
		done=="Done": stamp 'completed' (today's date if empty) into the
		.task file; delete=="Del": remove the file.
		"""
		username = logon.checkauth()
		taskdir = gettaskdir(username)
		try:
			int(id,16) # check whether id is plain hex integer without nastiness like .. or /  
		except ValueError:
			raise cherrypy.InternalRedirect(self.logoffpath)
		filename = os.path.join(taskdir,id+'.task')
		if done=="Done":
			d=configparser()
			with open(filename,"r") as file:
				d.readfp(file)  # readfp is deprecated (removed in Python 3.12)
			if completed == ""  or completed == "None": completed = date.today().isoformat()
			d.set('task','completed',completed)
			with open(filename,"w") as file:
				d.write(file)
		elif delete=="Del":
			os.unlink(filename)
		raise cherrypy.InternalRedirect(".")
Пример #19
0
def main(argv):
	"""Parse -c/--config, load connection settings and start the client."""
	global username, password, host, device_id
	try:
		opts, args = getopt.getopt(argv, "hc:", ["config="])
	except getopt.GetoptError:
		# Without this return, execution fell through to the loop below and
		# crashed with NameError because 'opts' was never bound.
		# NOTE(review): 'help' is presumably a module-level usage function
		# shadowing the builtin -- confirm.
		help()
		return
	for opt, arg in opts:
		if opt == '-c':
			cfg = arg
			config = configparser()
			# Close the config file instead of leaking the handle.
			with open(cfg) as handle:
				config.read_file(handle)
			username = config['dysondata']['username']
			password = config['dysondata']['password']
			host = config['dysondata']['host']
			device_id = config['dysondata']['device_id']
			port = config.getint('dysondata', 'port')
			interval = config.getint('dysondata', 'interval')
			start_client(host, username, password, port, interval)
		else:
			help()
    def do_check(self, configuration_file):
        """Flag configurations that store the client password in the file.

        RED when [client] password is present, GREEN otherwise.  Returns
        self.result.
        """
        # configparser.configparser does not exist; the class is ConfigParser.
        configuration = configparser.ConfigParser()

        try:
            configuration.read(configuration_file)

        except configparser.ParsingError as e:
            if self.verbose:
                print('Ignoring parsing errors:\n' + str(e))

        try:
            general_log_file = configuration.get('client', 'password')

            # if the option is found, then red!
            self.result['level'] = 'RED'
            self.result['output'] = 'Client password is in use.'
        except configparser.NoOptionError:
            self.result['level'] = 'GREEN'
            self.result['output'] = 'Client password not used.'

        return self.result
Пример #21
0
def login(user,password):
    """Authenticate a user against simulateDB.cfg.

    Returns (flag, role, authority) where flag is:
      '0' -- unknown user, or three failed attempts already recorded;
      '1' -- success (failure counter reset to 0 and persisted);
      '2' -- wrong password (failure counter incremented and persisted).
    role and authority are looked up from the config only on success.
    """
    # configparser.configparser does not exist; the class is ConfigParser.
    cf = configparser.ConfigParser()
    cf.read("simulateDB.cfg")
    jusrinfo = cf.get('user','usrinfo')
    usrinfo = json.loads(jusrinfo)
    authority = 0
    role = 0
    if usrinfo['user'] == user and usrinfo['times'] < 3:
        if usrinfo['password'] == password:
            usrinfo['times'] = 0
            flag = 1
        else:
            usrinfo['times'] += 1
            flag = 2
    else:
        flag = 0
    if flag:
        # Persist the updated failure counter.
        jcardfloatinfo = json.dumps(usrinfo)
        cf.set('user','usrinfo',jcardfloatinfo)
        # Close the file deterministically (was: cf.write(open(...))).
        with open('simulateDB.cfg', "w") as fh:
            cf.write(fh)
        if flag == 1:
            jroleinfo = cf.get('role','roleinfo')
            print(jroleinfo)
            roleinfo = json.loads(jroleinfo)
            for key in roleinfo:
                if user in roleinfo[key]:
                    role = key
                    print(role)
                    authority = cf.get('authority',role)
                    break
    return str(flag),role,authority
Пример #22
0
import os, configparser

# Ensure the pyscholar working directory exists.
# NOTE(review): expanduser("~user") expands the home of a literal user named
# 'user' (and is returned unchanged when no such account exists) -- plain '~'
# for the current user was probably intended; confirm before changing.
pyscholarDir = os.path.join(os.path.expanduser("~user"), "pyscholar")
if not os.path.exists(pyscholarDir):
    os.makedirs(pyscholarDir)

# Create pyscholar/keys.cfg with an empty Scopus key if it is missing.
if not os.path.exists(os.path.join(pyscholarDir, "keys.cfg")):
    # configparser.configparser does not exist; the class is ConfigParser.
    keysParser = configparser.ConfigParser()
    keysParser.add_section("Keys")
    keysParser.set('Keys', 'Scopus', "")
    # Temporarily clear the umask so the file is created world-accessible.
    originalMask = os.umask(0)
    keysDescriptor = os.open(os.path.join(pyscholarDir, "keys.cfg"), os.O_WRONLY | os.O_CREAT)
    keysFile = os.fdopen(keysDescriptor, 'w')
    os.umask(originalMask)
    keysParser.write(keysFile)
    keysFile.close()

#import scopus
#import network
Пример #23
0
    def parse_config(self, argv):
        """Parse CONF_DIR/<name>.ini (name = argv[1] or "my-site") into self.config.

        Populates self.config['server'] plus one self.config['targets'][name]
        dict per [target_*] section.  Relative paths are anchored at CONF_DIR
        via join_conf_dir().  Logs and exits the process when a required
        configuration key is missing.
        """
        def join_conf_dir(path):
            # Turn relative paths to absolute paths, depending on the
            # location of the INI (or rather CONF_DIR which by its definition
            # is the location of the INI).
            return path if os.path.isabs(path) else os.path.join(
                CONF_DIR, path)

        config = configparser()
        if len(argv) == 1:
            config_file = "my-site"
        else:
            config_file = argv[1]
        config_path = os.path.join(CONF_DIR, config_file + '.ini')
        logger.info("Reading %s", config_path)
        config.read(config_path)
        self.config = {}
        try:
            # [server] section: global daemon settings.
            self.config['server'] = {}
            self.config['server']['name'] = config_file
            self.config['server']['loglevel'] = int(
                config['server']['loglevel'])
            self.config['server']['host'] = config['server']['host']
            self.config['server']['port'] = int(config['server']['port'])
            self.config['server']['enable_mail'] = config['server'][
                'enable_mail']
            self.config['server']['repo_dir'] = join_conf_dir(
                config['server']['repo_dir'])
            self.config['server']['temp_repo_dir'] = join_conf_dir(
                config['server']['temp_repo_dir'])
            self.config['server']['valid_languages'] = config['server'][
                'valid_languages']
            # 'max' resolves to the machine's CPU count.
            if config['server']['max_threads'] == 'max':
                self.config['server'][
                    'max_threads'] = multiprocessing.cpu_count()
            else:
                self.config['server']['max_threads'] = int(
                    config['server']['max_threads'])
            # One entry per [target_*] section, keyed by the target's name.
            self.config['targets'] = {}
            for section in config.sections():
                if not str(section).startswith("target_"):
                    continue
                self.config['targets'][config[section]['name']] = {}
                self.config['targets'][
                    config[section]['name']]['name'] = config[section]['name']
                self.config['targets'][
                    config[section]['name']]['template_dir'] = join_conf_dir(
                        config[section]['template_dir'])
                self.config['targets'][config[section]['name']][
                    'active'] = config[section]['active']
                self.config['targets'][config[section]['name']][
                    'draft'] = config[section]['draft']
                self.config['targets'][config[section]['name']][
                    'remarks'] = config[section]['remarks']
                self.config['targets'][
                    config[section]['name']]['meta'] = config[section]['meta']
                self.config['targets'][config[section]['name']][
                    'default_xslt_params'] = join_conf_dir(
                        config[section]['default_xslt_params'])
                self.config['targets'][
                    config[section]['name']]['enable_target_sync'] = config[
                        section]['enable_target_sync']
                # target_path is only required when syncing is enabled.
                if config[section]['enable_target_sync'] == 'yes':
                    self.config['targets'][config[section]['name']][
                        'target_path'] = config[section]['target_path']
                self.config['targets'][
                    config[section]['name']]['backup_path'] = join_conf_dir(
                        config[section]['backup_path'])
                self.config['targets'][config[section]
                                       ['name']]['config_dir'] = join_conf_dir(
                                           config[section]['config_dir'])
                self.config['targets'][config[section]['name']][
                    'languages'] = config[section]['languages']
                self.config['targets'][config[section]['name']][
                    'default_lang'] = config[section]['default_lang']
                self.config['targets'][config[section]['name']][
                    'omit_default_lang_path'] = config[section][
                        'omit_default_lang_path']
                self.config['targets'][config[section]['name']][
                    'internal'] = config[section]['internal']
                self.config['targets'][config[section]['name']][
                    'zip_formats'] = config[section]['zip_formats']
                self.config['targets'][config[section]['name']][
                    'server_base_path'] = config[section]['server_base_path']
                self.config['targets'][
                    config[section]['name']]['canonical_url_domain'] = config[
                        section]['canonical_url_domain']
                self.config['targets'][config[section]['name']][
                    'server_root_files'] = join_conf_dir(
                        config[section]['server_root_files'])

                self.config['targets'][
                    config[section]['name']]['enable_ssi_fragments'] = config[
                        section]['enable_ssi_fragments']
                # Fragment dirs are only required when SSI is enabled.
                if config[section]['enable_ssi_fragments'] == 'yes':
                    self.config['targets'][config[section]['name']][
                        'fragment_dir'] = join_conf_dir(
                            config[section]['fragment_dir'])
                    self.config['targets'][config[section]['name']][
                        'fragment_l10n_dir'] = join_conf_dir(
                            config[section]['fragment_l10n_dir'])
                # FIXME: I guess this is not the prettiest way to handle
                # optional values (but it works for now)
                self.config['targets'][config[section]
                                       ['name']]['build_container'] = False
                if 'build_container' in list(config[section].keys()):
                    self.config['targets'][config[section]['name']][
                        'build_container'] = config[section]['build_container']

                self.config['targets'][config[section]['name']][
                    'site_sections'] = config[section]['site_sections']
                self.config['targets'][
                    config[section]['name']]['default_site_section'] = config[
                        section]['default_site_section']

        except KeyError as error:
            logger.warning(
                "Invalid configuration file, missing configuration key %s. Exiting.",
                error)
            sys.exit(1)
Пример #24
0
 def parse_config(self, argv):
     """Parse CONF_DIR/<name>.ini (name = argv[1] or "docserv") into self.config.

     Populates self.config['server'] plus one self.config['targets'][name]
     dict per [target_*] section.  Logs and exits the process when a
     required configuration key is missing.
     """
     config = configparser()
     if len(argv) == 1:
         config_file = "docserv"
     else:
         config_file = argv[1]
     config_path = os.path.join(CONF_DIR, config_file + '.ini')
     logger.info("Reading %s", config_path)
     config.read(config_path)
     self.config = {}
     try:
         # [server] section: global daemon settings.
         self.config['server'] = {}
         self.config['server']['name'] = config_file
         self.config['server']['loglevel'] = int(
             config['server']['loglevel'])
         self.config['server']['host'] = config['server']['host']
         self.config['server']['port'] = int(config['server']['port'])
         self.config['server']['repo_dir'] = config['server']['repo_dir']
         self.config['server']['temp_repo_dir'] = config['server'][
             'temp_repo_dir']
         self.config['server']['valid_languages'] = config['server'][
             'valid_languages']
         self.config['server']['max_threads'] = int(
             config['server']['max_threads'])
         # One entry per [target_*] section, keyed by the target's name.
         self.config['targets'] = {}
         for section in config.sections():
             if not str(section).startswith("target_"):
                 continue
             self.config['targets'][config[section]['name']] = {}
             self.config['targets'][
                 config[section]['name']]['name'] = config[section]['name']
             self.config['targets'][config[section]['name']][
                 'template_dir'] = config[section]['template_dir']
             self.config['targets'][config[section]['name']][
                 'active'] = config[section]['active']
             self.config['targets'][config[section]['name']][
                 'draft'] = config[section]['draft']
             self.config['targets'][config[section]['name']][
                 'remarks'] = config[section]['remarks']
             self.config['targets'][
                 config[section]['name']]['meta'] = config[section]['meta']
             self.config['targets'][
                 config[section]['name']]['default_xslt_params'] = config[
                     section]['default_xslt_params']
             self.config['targets'][config[section]['name']][
                 'target_path'] = config[section]['target_path']
             self.config['targets'][config[section]['name']][
                 'backup_path'] = config[section]['backup_path']
             self.config['targets'][config[section]['name']][
                 'config_dir'] = config[section]['config_dir']
             self.config['targets'][config[section]['name']][
                 'languages'] = config[section]['languages']
             self.config['targets'][config[section]['name']][
                 'default_lang'] = config[section]['default_lang']
             self.config['targets'][config[section]['name']][
                 'omit_default_lang_path'] = config[section][
                     'omit_default_lang_path']
             self.config['targets'][config[section]['name']][
                 'internal'] = config[section]['internal']
             self.config['targets'][config[section]['name']][
                 'zip_formats'] = config[section]['zip_formats']
             self.config['targets'][config[section]['name']][
                 'server_base_path'] = config[section]['server_base_path']
             self.config['targets'][
                 config[section]['name']]['canonical_url_domain'] = config[
                     section]['canonical_url_domain']
             self.config['targets'][config[section]['name']][
                 'htaccess'] = config[section]['htaccess']
             self.config['targets'][config[section]['name']][
                 'favicon'] = config[section]['favicon']
     except KeyError as error:
         logger.warning(
             "Invalid configuration file, missing configuration key '%s'. Exiting.",
             error)
         sys.exit(1)
def main():
    """Aggregate a PhenoCam ROI timeseries into n-day GCC percentile rows.

    Reads the per-image ROI statistics for one site/ROI, filters rows by
    time-of-day, solar elevation and brightness criteria, then computes
    per-period (default 1 day) statistics (means, std devs, 50/75/90th
    percentiles of gcc and rcc, midday image info) and writes them to
    ``<site>_<roi>_<ndays>day.csv``.

    Relies on module-level names defined elsewhere in the file:
    ``archive_dir``, the ``default_*`` threshold constants, ``ND_STRING`` /
    ``ND_FLOAT`` / ``ND_INT`` no-data sentinels, ``configparser``
    (presumably ``from configparser import ConfigParser as configparser`` —
    TODO confirm), ``GCCTimeSeries``, ``get_roi_timeseries``,
    ``daterange2`` and ``quantile``.
    """

    # set up command line argument processing
    parser = argparse.ArgumentParser()

    # options
    parser.add_argument(
        "-v",
        "--verbose",
        help="increase output verbosity",
        action="store_true",
        default=False,
    )
    parser.add_argument(
        "-n",
        "--dry-run",
        help="Process data but don't save results",
        action="store_true",
        default=False,
    )

    # NOTE: range(1, 5, 2) restricts the aggregation period to 1 or 3 days.
    parser.add_argument(
        "-p",
        "--aggregation-period",
        help="Number of Days to Aggregate (default=1)",
        nargs="?",
        type=int,
        choices=range(1, 5, 2),
        default=1,
    )

    # positional arguments
    parser.add_argument("site", help="PhenoCam site name")
    parser.add_argument("roiname", help="ROI name, e.g. canopy_0001")

    # get args
    args = parser.parse_args()
    sitename = args.site
    roiname = args.roiname
    verbose = args.verbose
    dryrun = args.dry_run
    ndays = args.aggregation_period

    if verbose:
        print("site: {0}".format(sitename))
        print("roiname: {0}".format(roiname))
        print("verbose: {0}".format(verbose))
        print("dryrun: {0}".format(dryrun))
        print("period: {0}".format(ndays))

    # read in config file for this ROI List if it exists
    config_file = "{0}_{1}.cfg".format(sitename, roiname)
    config_path = os.path.join(archive_dir, sitename, "ROI", config_file)
    if os.path.exists(config_path):
        # NOTE: should probably subclass safe config parser
        # and add gettime() method which checks for time validity
        # Defaults are supplied as strings because ConfigParser stores
        # all option values as text.
        cfgparser = configparser(
            defaults={
                "nimage_threshold": str(default_nimage_threshold),
                "time_min": str(default_time_min),
                "time_max": str(default_time_max),
                "sunelev_min": str(default_sunelev_min),
                "brt_min": str(default_brt_min),
                "brt_max": str(default_brt_max),
            }
        )

        cfgparser.read(config_path)

        # Times in the config are "HH:MM:SS" strings; split and convert
        # each field to build datetime.time objects.
        if cfgparser.has_section("gcc90_calculation"):
            nimage_threshold = cfgparser.getint("gcc90_calculation", "nimage_threshold")
            time_max_str = cfgparser.get("gcc90_calculation", "time_max")
            [tmax_hr, tmax_mn, tmax_sc] = time_max_str.split(":")
            time_max = time(int(tmax_hr), int(tmax_mn), int(tmax_sc))
            time_min_str = cfgparser.get("gcc90_calculation", "time_min")
            [tmin_hr, tmin_mn, tmin_sc] = time_min_str.split(":")
            time_min = time(int(tmin_hr), int(tmin_mn), int(tmin_sc))
            sunelev_min = cfgparser.getfloat("gcc90_calculation", "sunelev_min")
            brt_min = cfgparser.getint("gcc90_calculation", "brt_min")
            brt_max = cfgparser.getint("gcc90_calculation", "brt_max")
        else:
            # config file exists but has no gcc90_calculation section:
            # fall back to the module-level defaults
            nimage_threshold = int(default_nimage_threshold)
            [tmax_hr, tmax_mn, tmax_sc] = default_time_max.split(":")
            time_max = time(int(tmax_hr), int(tmax_mn), int(tmax_sc))
            [tmin_hr, tmin_mn, tmin_sc] = default_time_min.split(":")
            time_min = time(int(tmin_hr), int(tmin_mn), int(tmin_sc))
            sunelev_min = default_sunelev_min
            brt_min = default_brt_min
            brt_max = default_brt_max

    else:
        # no config file at all: use the module-level defaults
        nimage_threshold = int(default_nimage_threshold)
        [tmax_hr, tmax_mn, tmax_sc] = default_time_max.split(":")
        time_max = time(int(tmax_hr), int(tmax_mn), int(tmax_sc))
        [tmin_hr, tmin_mn, tmin_sc] = default_time_min.split(":")
        time_min = time(int(tmin_hr), int(tmin_mn), int(tmin_sc))
        sunelev_min = default_sunelev_min
        brt_min = default_brt_min
        brt_max = default_brt_max

    # print config values
    if verbose:
        print("")
        print("gcc config:")
        print("===========")
        print("roi_list: ", "{0}_{1}_roi.csv".format(sitename, roiname))
        if os.path.exists(config_path):
            print("config file: {0}".format(config_file))
        else:
            print("config file: None")
        print("nimage threshold: ", nimage_threshold)
        print("time of day min: ", time_min)
        print("time of day max: ", time_max)
        print("sun elev min: ", sunelev_min)
        print("aggregate days: ", ndays)
        print("minimum brightness: ", brt_min)
        print("maximum brightness: ", brt_max)

    # set up output filename
    outdir = os.path.join(archive_dir, sitename, "ROI")
    outfile = "{0}_{1}_{2}day.csv".format(sitename, roiname, ndays)
    outpath = os.path.join(outdir, outfile)
    if verbose:
        print("output file: ", outfile)

    # create gcc timeseries object as empty list
    gcc_ts = GCCTimeSeries(
        site=sitename,
        ROIListID=roiname,
        nday=ndays,
        nmin=nimage_threshold,
        tod_min=time_min,
        tod_max=time_max,
        sunelev_min=sunelev_min,
        brt_min=brt_min,
        brt_max=brt_max,
    )

    # get roi timeseries for this site and roi
    roits = get_roi_timeseries(sitename, roiname)

    if verbose:
        print("")
        print("ROI timeseries info:")
        print("====================")
        print("site: ", roits.site)
        print("ROI list id: ", roits.roilistid)
        print("create date: ", roits.created_at)
        print("update date: ", roits.updated_at)
        print("nrows: ", len(roits.rows))

    # make list of rows which match image selection criteria
    roits_rows = roits.select_rows(
        tod_min=time_min,
        tod_max=time_max,
        sunelev_min=sunelev_min,
        brt_min=brt_min,
        brt_max=brt_max,
    )

    # check that some rows passed selection criteria
    nrows = len(roits_rows)
    if nrows == 0:
        print("No rows passed the selection criteria")
        return

    if verbose:
        print("Number of selected rows: {0}".format(nrows))

    # make a list of dates for selected images
    img_date = []
    for row in roits_rows:
        img_date.append(row["datetime"].date())

    # list is ordered so find first and last dates
    dt_first = img_date[0]
    dt_last = img_date[nrows - 1]
    if verbose:
        print("date first: {}".format(dt_first))
        print("date last: {}".format(dt_last))

    # set up a generator which yields dates for the start
    # of the next nday period covering the date range of image
    gcc_dr = daterange2(dt_first, dt_last, ndays)

    # calculate offset for timeseries based on nday
    # NOTE: true division, so an even ndays yields a fractional-day offset
    # (timedelta accepts floats) — presumably intentional since valid
    # ndays values are odd; TODO confirm.
    day_offset = ndays / 2
    date_offset = timedelta(days=day_offset)

    # roits_ndx will be index into ROI timeseries
    roits_ndx = 0

    # set up vars for accumulating stats
    # NOTE(review): bcc_vals is accumulated but never used in the period
    # statistics below.
    img_cnt = 0
    filenames = []
    r_dn_vals = []
    rcc_vals = []
    g_dn_vals = []
    gcc_vals = []
    b_dn_vals = []
    bcc_vals = []
    solar_elev_vals = []
    midday_delta_vals = []

    # loop over nday time periods
    for gcc_ndx, start_date in enumerate(gcc_dr):

        end_date = start_date + timedelta(ndays)
        gcc_date = start_date + date_offset
        doy = gcc_date.timetuple().tm_yday
        midday_noon = datetime(gcc_date.year, gcc_date.month, gcc_date.day, 12, 0, 0)

        # get roits rows for this time period
        while (
            roits_ndx < nrows
            and img_date[roits_ndx] >= start_date
            and img_date[roits_ndx] < end_date
        ):

            # skip this row if awbflag is 1 (auto white balance enabled)
            # NOTE: the inner `roits_ndx < nrows` guards are redundant --
            # the while condition already guarantees it -- so the `break`
            # branches are unreachable; harmless but dead code.
            if roits_rows[roits_ndx]["awbflag"] == 1:
                if roits_ndx < nrows:
                    roits_ndx += 1
                    continue
                else:
                    break

            filenames.append(roits_rows[roits_ndx]["filename"])
            r_dn = roits_rows[roits_ndx]["r_mean"]
            r_dn_vals.append(r_dn)
            g_dn = roits_rows[roits_ndx]["g_mean"]
            g_dn_vals.append(g_dn)
            b_dn = roits_rows[roits_ndx]["b_mean"]
            b_dn_vals.append(b_dn)
            dnsum = r_dn + g_dn + b_dn

            # NOTE: rcc and bcc are recomputed from the DN values, but gcc
            # is taken from the stored roistats CSV row below -- the code
            # and the original comment disagreed; confirm which is intended.
            # Rows with non-positive DN sum get NaN and do NOT count
            # toward img_cnt.
            if dnsum <= 0:
                rcc = np.nan
                bcc = np.nan
                gcc = np.nan
            else:
                img_cnt += 1
                rcc = r_dn / dnsum
                bcc = b_dn / dnsum
                gcc = roits_rows[roits_ndx]["gcc"]

            solar_elev = roits_rows[roits_ndx]["solar_elev"]

            # note that rcc_vals can include NaN's
            rcc_vals.append(rcc)
            gcc_vals.append(gcc)
            bcc_vals.append(bcc)
            solar_elev_vals.append(solar_elev)
            # distance (in seconds) of this image from local noon of the
            # mid-interval date; used to pick the "midday" image
            midday_td = roits_rows[roits_ndx]["datetime"] - midday_noon
            midday_td_secs = np.abs(midday_td.days * 86400 + midday_td.seconds)
            midday_delta_vals.append(midday_td_secs)

            if roits_ndx < nrows:
                roits_ndx += 1
            else:
                break

        # check to see if we got any (good) images
        if img_cnt == 0:
            # nodata for this time period
            image_count = 0
            midday_filename = ND_STRING
            midday_r = ND_FLOAT
            midday_g = ND_FLOAT
            midday_b = ND_FLOAT
            midday_gcc = ND_FLOAT
            midday_rcc = ND_FLOAT
            r_mean = ND_FLOAT
            r_std = ND_FLOAT
            g_mean = ND_FLOAT
            g_std = ND_FLOAT
            b_mean = ND_FLOAT
            b_std = ND_FLOAT
            gcc_mean = ND_FLOAT
            gcc_std = ND_FLOAT
            gcc_50 = ND_FLOAT
            gcc_75 = ND_FLOAT
            gcc_90 = ND_FLOAT
            rcc_mean = ND_FLOAT
            rcc_std = ND_FLOAT
            rcc_50 = ND_FLOAT
            rcc_75 = ND_FLOAT
            rcc_90 = ND_FLOAT
            max_solar_elev = ND_FLOAT
            snow_flag = ND_INT
            outlierflag_gcc_mean = ND_INT
            outlierflag_gcc_50 = ND_INT
            outlierflag_gcc_75 = ND_INT
            outlierflag_gcc_90 = ND_INT

        # got some good images but not enough - probably there
        # are cases where this will fail e.g. not images on the
        # midday of a 3-day aggregation period.
        elif img_cnt < nimage_threshold:
            # not enough images: record the midday image only, no stats
            image_count = img_cnt
            # find nearest image to midday (noon) on mid-interval date
            mi_ndx = midday_delta_vals.index(min(midday_delta_vals))
            midday_filename = filenames[mi_ndx]
            midday_r = r_dn_vals[mi_ndx]
            midday_g = g_dn_vals[mi_ndx]
            midday_b = b_dn_vals[mi_ndx]
            midday_gcc = gcc_vals[mi_ndx]
            midday_rcc = rcc_vals[mi_ndx]

            # no stats for this time interval
            r_mean = ND_FLOAT
            r_std = ND_FLOAT
            g_mean = ND_FLOAT
            g_std = ND_FLOAT
            b_mean = ND_FLOAT
            b_std = ND_FLOAT
            gcc_mean = ND_FLOAT
            gcc_std = ND_FLOAT
            gcc_50 = ND_FLOAT
            gcc_75 = ND_FLOAT
            gcc_90 = ND_FLOAT
            rcc_mean = ND_FLOAT
            rcc_std = ND_FLOAT
            rcc_50 = ND_FLOAT
            rcc_75 = ND_FLOAT
            rcc_90 = ND_FLOAT
            max_solar_elev = max(solar_elev_vals)
            snow_flag = ND_INT
            outlierflag_gcc_mean = ND_INT
            outlierflag_gcc_50 = ND_INT
            outlierflag_gcc_75 = ND_INT
            outlierflag_gcc_90 = ND_INT

        # stats for this period should be complete - only
        # snow flags are missing data
        else:
            # find nearest image to midday (noon) on mid-interval date
            mi_ndx = midday_delta_vals.index(min(midday_delta_vals))
            midday_filename = filenames[mi_ndx]
            midday_r = r_dn_vals[mi_ndx]
            midday_g = g_dn_vals[mi_ndx]
            midday_b = b_dn_vals[mi_ndx]
            midday_gcc = gcc_vals[mi_ndx]
            midday_rcc = rcc_vals[mi_ndx]

            # get stats for this time interval; nan-aware reductions skip
            # the NaN entries added for zero-DN rows
            image_count = img_cnt
            r_mean = np.nanmean(r_dn_vals)
            r_std = np.nanstd(r_dn_vals)
            g_mean = np.nanmean(g_dn_vals)
            g_std = np.nanstd(g_dn_vals)
            b_mean = np.nanmean(b_dn_vals)
            b_std = np.nanstd(b_dn_vals)
            gcc_mean = np.nanmean(gcc_vals)
            gcc_std = np.nanstd(gcc_vals)
            gcc_50 = quantile(gcc_vals, 0.5)
            gcc_75 = quantile(gcc_vals, 0.75)
            gcc_90 = quantile(gcc_vals, 0.9)
            rcc_mean = np.nanmean(rcc_vals)
            rcc_std = np.nanstd(rcc_vals)
            rcc_50 = quantile(rcc_vals, 0.5)
            rcc_75 = quantile(rcc_vals, 0.75)
            rcc_90 = quantile(rcc_vals, 0.9)
            max_solar_elev = max(solar_elev_vals)
            snow_flag = ND_INT
            outlierflag_gcc_mean = ND_INT
            outlierflag_gcc_50 = ND_INT
            outlierflag_gcc_75 = ND_INT
            outlierflag_gcc_90 = ND_INT

        # append to gcc timeseries
        gcc_ts_row = gcc_ts.insert_row(
            gcc_date,
            doy,
            image_count,
            midday_filename,
            midday_r,
            midday_g,
            midday_b,
            midday_gcc,
            midday_rcc,
            r_mean,
            r_std,
            g_mean,
            g_std,
            b_mean,
            b_std,
            gcc_mean,
            gcc_std,
            gcc_50,
            gcc_75,
            gcc_90,
            rcc_mean,
            rcc_std,
            rcc_50,
            rcc_75,
            rcc_90,
            max_solar_elev,
            snow_flag,
            outlierflag_gcc_mean,
            outlierflag_gcc_50,
            outlierflag_gcc_75,
            outlierflag_gcc_90,
        )

        # print result if verbose
        if verbose:
            csvstr = gcc_ts.format_csvrow(gcc_ts_row)
            print(csvstr)

        # reset accumulated values for the next period
        img_cnt = 0
        filenames = []
        r_dn_vals = []
        rcc_vals = []
        g_dn_vals = []
        gcc_vals = []
        b_dn_vals = []
        bcc_vals = []
        solar_elev_vals = []
        midday_delta_vals = []

    # write the aggregated CSV unless this is a dry run
    if dryrun:
        nout = 0
    else:
        nout = gcc_ts.writeCSV(outpath)

    print("Total: %d" % (nout,))
def main():
    """Update an existing PhenoCam ROI-statistics CSV with newer images.

    Reads ``<site>_<roi>_roistats.csv``, finds the timestamp of the last
    row already processed, then appends a statistics row for every newer
    image that falls inside each ROI mask's validity interval.  Writes the
    updated CSV back in place (unless --dry-run) and prints counts.

    Exits with status 1 if the CSV, the optional config file consistency
    check, or an ROI mask file fails.

    Relies on module-level names defined elsewhere in the file:
    ``archive_dir``, ``default_resize``, ``debug``, ``configparser``
    (presumably the ConfigParser class -- TODO confirm), ``ROITimeSeries``,
    ``get_roi_list``, ``utils`` and PIL's ``Image``.
    """
    # set up command line argument processing
    parser = argparse.ArgumentParser()

    # options
    parser.add_argument(
        "-v",
        "--verbose",
        help="increase output verbosity",
        action="store_true",
        default=False,
    )
    parser.add_argument(
        "-n",
        "--dry-run",
        help="Process data but don't save results",
        action="store_true",
        default=False,
    )

    # positional arguments
    parser.add_argument("site", help="PhenoCam site name")
    parser.add_argument("roiname", help="ROI name, e.g. canopy_0001")

    # get args
    args = parser.parse_args()
    sitename = args.site
    roiname = args.roiname
    verbose = args.verbose
    dryrun = args.dry_run

    if verbose:
        print("site: {0}".format(sitename))
        print("roiname: {0}".format(roiname))
        print("verbose: {0}".format(verbose))
        print("dryrun: {0}".format(dryrun))

    # set output filename (update in place: input == output)
    inname = "%s_%s_roistats.csv" % (sitename, roiname)
    outname = inname
    inpath = os.path.join(archive_dir, sitename, "ROI", outname)
    outpath = inpath
    if verbose:
        print("output file: {0}".format(outname))

    # get ROI list
    roi_list = get_roi_list(sitename, roiname)

    # read existing CSV file - since this is an update throw
    # exception if the file doesn't already exist
    try:
        roits = ROITimeSeries(site=sitename, ROIListID=roiname)
        roits.readCSV(outpath)
    except IOError:
        errmsg = "Unable to read CSV file: {0}\n".format(outpath)
        sys.stderr.write(errmsg)
        sys.exit(1)

    # read in config file for this site if it exists
    config_file = "{0}_{1}.cfg".format(sitename, roiname)
    config_path = os.path.join(archive_dir, sitename, "ROI", config_file)
    if os.path.exists(config_path):
        cfgparser = configparser(defaults={"resize": str(default_resize)})
        cfgparser.read(config_path)
        if cfgparser.has_section("roi_timeseries"):
            resizeFlg = cfgparser.getboolean("roi_timeseries", "resize")
        else:
            resizeFlg = default_resize

        # verify that config matches CSV header!
        if resizeFlg != roits.resizeFlg:
            errmsg = "resize flag from config doesn't match CSV header\n"
            sys.stderr.write(errmsg)
            sys.exit(1)

    else:
        resizeFlg = default_resize

    # print config values
    if verbose:
        print("")
        print("ROI timeseries config:")
        print("======================")
        print("roi_list: ", "{0}_{1}_roi.csv".format(sitename, roiname))
        if os.path.exists(config_path):
            print("config file: {0}".format(config_file))
        else:
            print("config file: None")
        print("Resize Flag: ", resizeFlg)

    # get list of images already in CSV
    old_imglist = roits.get_image_list()

    # find last dt in current timeseries CSV
    nlast = len(roits.rows) - 1
    dt_last = roits.rows[nlast]["datetime"]

    # add five seconds so that we don't reprocess last image
    dt_last = dt_last + timedelta(seconds=5)

    # start with images newer than last dt
    dt_start = dt_last

    if verbose:
        print("last image at: {0}".format(dt_last))

    # loop over mask entries in ROI list
    nimage = 0
    nupdate = 0
    for imask, roimask in enumerate(roi_list.masks):

        roi_startDT = roimask["start_dt"]
        roi_endDT = roimask["end_dt"]

        # skip this ROI maskfile if its validity interval ends
        # before last date before update
        if roi_endDT < dt_start:
            continue

        maskfile = roimask["maskfile"]

        # okay set the start datetime to the larger of dt_start (from
        # last row of existing timeseries CSV) and the beginning of
        # the ROI validity.  We need to do this for the case where
        # there is a gap between last row of CSV and beginning of next
        # validity interval.  This will often be the case when there
        # are a series of "transitional images" between two
        # stable/useful camera positions.
        if dt_start < roi_startDT:
            dt_start = roi_startDT

        mask_path = os.path.join(archive_dir, sitename, "ROI", maskfile)
        try:
            mask_img = Image.open(mask_path)
        except Exception:
            sys.stderr.write("Unable to open ROI mask file\n")
            sys.exit(1)

        # check that mask_img is in expected form: convert to 8-bit
        # grayscale if it isn't already
        mask_mode = mask_img.mode
        if mask_mode != "L":
            mask_img = mask_img.convert("L")

        # make a numpy boolean mask
        # (np.bool8 was removed in NumPy 2.0; plain bool is equivalent)
        roimask = np.asarray(mask_img, dtype=bool)

        # get list of images for this timeperiod
        imglist = utils.getsiteimglist(
            sitename, getIR=False, startDT=dt_start, endDT=roi_endDT
        )

        nimage += len(imglist)
        for impath in imglist:

            if debug:
                print(maskfile, impath)

            # check if image already exists in list -- just to be
            # sure!  list.index raises ValueError when not found.
            fn = os.path.basename(impath)
            try:
                row_index = old_imglist.index(fn)
            except ValueError:
                row_index = None

            # append/insert row for this image/mask - shouldn't happen
            # but just to be on safe side!
            # BUGFIX: compare against None explicitly; the old truthiness
            # test treated a match at index 0 as "not found" and appended
            # a duplicate row instead of replacing it.
            if row_index is not None:
                roits_row = roits.insert_row(impath, roimask, imask + 1)
            else:
                roits_row = roits.append_row(impath, roimask, imask + 1)

            # check that we could append/insert a row
            if roits_row:
                nupdate += 1
            else:
                continue

            if verbose:
                csvstr = roits.format_csvrow(roits_row)
                print(csvstr)

            if debug:
                if nupdate == 10:
                    break

    # output CSV file
    if dryrun:
        nout = 0
    else:
        nout = roits.writeCSV(outpath)

    print("Images processed: %d" % (nimage,))
    print("Images added to CSV: %d" % (nupdate,))
    print("Total: %d" % (nout,))
Пример #27
0
def main():
    """
    Generate an IR ROI timeseries from a PhenoCam directory of images.

    Builds ``<site>_<roi>_IR_roistats.csv`` from scratch: for each mask in
    the ROI list, loads the mask image, lists the IR images in the mask's
    validity interval, and appends one statistics row per image.  Exits
    with status 1 if a mask file cannot be opened.

    Relies on module-level names defined elsewhere in the file:
    ``archive_dir``, ``default_resize``, ``debug``, ``configparser``
    (presumably the ConfigParser class -- TODO confirm),
    ``IRROITimeSeries``, ``get_roi_list``, ``utils`` and PIL's ``Image``.
    """

    # set up command line argument processing
    parser = argparse.ArgumentParser()

    # options
    parser.add_argument(
        "-v",
        "--verbose",
        help="increase output verbosity",
        action="store_true",
        default=False,
    )
    parser.add_argument(
        "-n",
        "--dry-run",
        help="Process data but don't save results",
        action="store_true",
        default=False,
    )

    # positional arguments
    parser.add_argument("site", help="PhenoCam site name")
    parser.add_argument("roiname", help="ROI name, e.g. DB_0001")

    # get args
    args = parser.parse_args()
    sitename = args.site
    roiname = args.roiname
    verbose = args.verbose
    dryrun = args.dry_run

    if verbose:
        print("site: {0}".format(sitename))
        print("roiname: {0}".format(roiname))
        print("verbose: {0}".format(verbose))
        print("dryrun: {0}".format(dryrun))

    # set output filename
    outname = "%s_%s_IR_roistats.csv" % (sitename, roiname)
    outdir = os.path.join(archive_dir, sitename, "ROI")
    outpath = os.path.join(outdir, outname)
    if verbose:
        print("archive dir: {0}".format(archive_dir))
        print("output file: {0}".format(outname))

    # read in config file for this site if it exists
    config_file = "{0}_{1}.cfg".format(sitename, roiname)
    config_path = os.path.join(archive_dir, sitename, "ROI", config_file)
    if os.path.exists(config_path):
        cfgparser = configparser(defaults={"resize": str(default_resize)})
        cfgparser.read(config_path)
        if cfgparser.has_section("roi_timeseries"):
            resizeFlg = cfgparser.getboolean("roi_timeseries", "resize")
        else:
            resizeFlg = default_resize

    else:
        resizeFlg = default_resize

    # print config values
    if verbose:
        print("")
        print("ROI IR timeseries config:")
        print("=========================")
        print("roi_list: ", "{0}_{1}_roi.csv".format(sitename, roiname))
        if os.path.exists(config_path):
            print("config file: {0}".format(config_file))
        else:
            print("config file: None")
        print("Resize Flag: ", resizeFlg)

    # create new roi_timeseries object for this ROIList
    roits = IRROITimeSeries(site=sitename,
                            ROIListID=roiname,
                            resizeFlag=resizeFlg)

    # grab roi list
    roi_list = get_roi_list(sitename, roiname)

    # loop over mask entries in ROI list
    nimage = 0
    nupdate = 0
    for roimask_index, roimask in enumerate(roi_list.masks):

        startDT = roimask["start_dt"]
        endDT = roimask["end_dt"]
        maskfile = roimask["maskfile"]

        mask_path = os.path.join(archive_dir, sitename, "ROI", maskfile)
        # open roi mask file
        try:
            mask_img = Image.open(mask_path)

        except IOError:
            sys.stderr.write("Unable to open ROI mask file\n")
            sys.exit(1)

        # check that mask_img is in expected form: convert to 8-bit
        # grayscale if it isn't already
        mask_mode = mask_img.mode
        if mask_mode != "L":
            mask_img = mask_img.convert("L")

        # make a numpy boolean mask
        # (np.bool8 was removed in NumPy 2.0; plain bool is equivalent)
        roimask = np.asarray(mask_img, dtype=bool)

        # get list of images for this timeperiod
        imglist = utils.getsiteimglist(sitename,
                                       getIR=True,
                                       startDT=startDT,
                                       endDT=endDT)

        nimage += len(imglist)

        for impath in imglist:

            # append row for this image/mask - shouldn't get
            # any duplicates so just append
            roits_row = roits.append_row(impath, roimask, roimask_index + 1)
            if roits_row:
                nupdate += 1
            else:
                continue

            if verbose:
                csvstr = roits.format_csvrow(roits_row)
                print(csvstr)

            if debug:
                if nupdate == 10:
                    break

    # output CSV file
    if dryrun:
        nout = 0
    else:
        nout = roits.writeCSV(outpath)

    print("Images processed: %d" % (nimage, ))
    print("Images added to CSV: %d" % (nupdate, ))
    print("Total: %d" % (nout, ))
Пример #28
0
#
# Copyright Leonardo Amaral <*****@*****.**>
#
# Inspired in http://lkml.indiana.edu/hypermail/linux/kernel/0811.3/01905.html
#
# Thanks To: Ricardo Canale <*****@*****.**>
#            And his mother for the Notebook Donation ;)
#

from configparser import ConfigParser as configparser
from time import sleep
from syslog import syslog, openlog, LOG_DAEMON, LOG_INFO, LOG_WARNING, LOG_NOTICE
from os import getpid
import struct, signal, sys

# Read the daemon configuration; `configparser` here is the ConfigParser
# class (imported above as `from configparser import ConfigParser as
# configparser`), so this constructs a parser, not the module.
opts = configparser()
opts.read('/etc/driveguardd.conf')

# Process id, used both for the syslog tag and the pidfile below.
PID = getpid()

openlog('Driveguardd [%d]' % PID, 0, LOG_DAEMON)

# Write the pidfile so init scripts can locate and signal the daemon.
with open('/var/run/driveguardd.pid', 'w') as pidfile:
    pidfile.write(str(PID) + '\n')


def exit_handler(signalsent, frame):
    if signalsent == signal.SIGTERM or signalsent == signal.SIGINT:
        syslog(LOG_NOTICE, 'Stopping daemon.')
        sys.exit(0)
    else:
Пример #29
0
# Command-line interface: --desktop selects a section of the config file,
# --config overrides the default config location.
# (argument_parser, communicate, configparser, isfile and expanduser are
# defined/imported earlier in the file.)
argument_parser.add_argument(
    "-d",
    "--desktop",
    type=str,
    help="Switch to desktop DESKTOP, as defined in the config.",
    required=True)
argument_parser.add_argument("--config",
                             type=str,
                             default=expanduser("~") +
                             "/.config/switcher.conf",
                             help="Set the config file location.")
# Parse the arguments.
arguments = argument_parser.parse_args()

# Create a parser for the config file.
config = configparser()
# Check that the config file exists and read it.
if isfile(arguments.config):
    config.read(arguments.config)
else:
    argument_parser.print_usage()
    communicate("error: Config file '%s' not found." % (arguments.config),
                quit=True)

# Check the arguments and the config: each config section names one
# supported desktop, so the requested desktop must be a section.
if arguments.desktop not in config.sections():
    argument_parser.print_usage()
    communicate(
        "error: Unsupported desktop given. Supported values are: '%s'." %
        ("', '".join(config.sections())),
        quit=True)
Пример #30
0
import logging.config
import logging
import yaml
import configparser

from sqlalchemy.orm import sessionmaker
from sqlalchemy import create_engine

from collector.pastebin.paste_collector import PasteParser
from models.model import PasteData, HitData

if __name__ == '__main__':

    # Configure logging from the YAML file.
    # BUGFIX: the stdlib names are camelCase -- logging.config.dictConfig
    # and logging.getLogger; the lowercase spellings raised AttributeError.
    with open('/users/paco/proyectos/osint/core/collector/log.yaml', 'r') as f:
        config = yaml.safe_load(f.read())
        logging.config.dictConfig(config)

    logger = logging.getLogger('pastes')

    # BUGFIX: the class is configparser.ConfigParser (the lowercase
    # configparser.configparser does not exist).
    cfg = configparser.ConfigParser()
    cfg.read('/users/paco/proyectos/osint/core/collector/config.ini')

    # Set up the database session and the data-source wrappers.
    engine = create_engine('mysql+pymysql://dev:xxxxx@localhost/testing')
    Session = sessionmaker(bind=engine)
    session = Session()
    # BUGFIX: use the imported class names PasteData / HitData /
    # PasteParser; the lowercase spellings were NameErrors.
    data_sources = {'paste': PasteData(session), 'hit': HitData(session)}
    PasteParser(data_sources['paste'], data_sources['hit'],
                cfg['pastebin']).start_scrapping()
#Script to check the number of unreviewed tweets and email the admins if there are some


import MySQLdb
import tweetsclient
import smtplib
from email.mime.text import MIMEText
import configparser
import pytz
import datetime

# Read SMTP/alert settings and open an authenticated SMTP connection.
# BUGFIX: the class is configparser.ConfigParser -- the lowercase
# configparser.configparser raised AttributeError on the module.
smtpconfig = configparser.ConfigParser()
smtpconfig.read('conf/tweets-client.ini')
# BUGFIX: pass the port as an int (getint) rather than a raw string.
smtp = smtplib.SMTP(smtpconfig.get('moderation-alerts', 'mail_host'),
                    smtpconfig.getint('moderation-alerts', 'mail_port'))
smtp.login(smtpconfig.get('moderation-alerts', 'mail_username'),
           smtpconfig.get('moderation-alerts', 'mail_password'))

recipient = smtpconfig.get('moderation-alerts', 'unmoderated_recipient')
sender = smtpconfig.get('moderation-alerts', 'sender')
max_tweets = smtpconfig.getint('moderation-alerts', 'max_tweets')

# Connect to the tweets database using the shared client configuration.
config = tweetsclient.Config().get()
conn = MySQLdb.connect(
            host=config.get('database', 'host'),
            port=int(config.get('database', 'port')),
            db=config.get('database', 'database'),
            user=config.get('database', 'username'),
            passwd=config.get('database', 'password'),
            charset="utf8mb4",
            use_unicode=True
        )
cur = conn.cursor()
Пример #32
0
        _image.set_colorkey(colorkey)
        _image = _image.convert()
    else:
        _image = _image.convert_alpha()
    return _image


# import globals

import configparser, os

# Resource loading: locate the data/config directory relative to this file.
DATA_PY = os.path.abspath(os.path.dirname(__file__))
DATA_DIR = os.path.normpath(os.path.join(DATA_PY, '..', 'data/config/'))

# Parse the globals file.
# BUGFIX: the class is configparser.ConfigParser -- the lowercase
# configparser.configparser raised AttributeError on the module.
global_config = configparser.ConfigParser()
global_config.read(os.path.join(DATA_DIR, "Globals.ini"))

G, D = {}, {}

# Coerce each [game] option from its string form: non-negative integers
# become int, "true"/"false" become bool, everything else stays a string.
for pair in global_config.items('game'):
    if pair[1].isdigit():
        value = int(pair[1])
    elif pair[1].lower() == "true":
        value = True
    elif pair[1].lower() == "false":
        value = False
    else:
        value = pair[1]
    G[pair[0]] = value