Example #1
    def __init__(self):
        # deal with configuration file
        # configparser.read(default.cfg)
        cfg = SafeConfigParser()
        cfg.read("default.cfg")

        data = cfg.get("Paths", "data")
        orthography_profile = cfg.get("Paths", "orthography_profile")

        # set variables, e.g. source, orthography parser, etc.
        self.data = open(data, "r")

        self.o = OrthographyParser(orthography_profile)        
        # self.o = GraphemeParser()        

        self._languages = collections.defaultdict(int) # given unique ID to each unique language name
        self._concepts = collections.defaultdict(int) # ...
        self._counterparts = collections.defaultdict(int) # ..
        self._wordlist_iterator = self._process_input(self.data)

        # print(type(self.iterator))
        # print(len(self.counterparts))
        # words = self.get_qlc_tokenized_words()

        """
        count = 0
        for line in words:
            if line != "":
                print(line)
                count += 1
        print(count)
        """

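A side note that applies to every snippet on this page: SafeConfigParser is the legacy name of the class; since Python 3.2 it is only a deprecated alias of configparser.ConfigParser, and the alias was removed in Python 3.12. A minimal sketch of the equivalent modern read, assuming the same default.cfg layout with a [Paths] section:

from configparser import ConfigParser

cfg = ConfigParser()
cfg.read("default.cfg")
data = cfg.get("Paths", "data")
orthography_profile = cfg.get("Paths", "orthography_profile")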
        """
Exemple #2
0
def main():
    config = SafeConfigParser()
    config.readfp(open('config.cfg'))

    set_logging(config.get('logs', 'path'), 'xivdmgui')

    dat_manager = DatManager(config.get('game', 'path'))
    gen_manager = GenManager(dat_manager)

    app = QtGui.QApplication(sys.argv)

    main_window = QtGui.QMainWindow()
    main_window.setWindowTitle('Model Viewer')
    
    gl_widget = OpenGLWidget(dat_manager)

    list_view = ListWidget(sorted(walk_dict(gen_manager.get_category('models'))), gl_widget)

    splitter = QtGui.QSplitter()
    splitter.addWidget(list_view)
    splitter.addWidget(gl_widget)

    main_window.setCentralWidget(splitter)
    main_window.resize(640, 480)
    main_window.show()
    
    sys.exit(app.exec_())
Example #3
    def loadConfig(cls):
        config = SafeConfigParser()
        if os.path.isfile(CONFIG_FILE):
            config.read_file(codecs.open(CONFIG_FILE, encoding='utf-8'))

        # The default config
        cls.config = {}
        cls.config['command_open'] = None
        cls.config['player'] = 1
        cls.config['debug'] = 0

        # Load the options
        if config.has_section('options'):
            for key, value in config.items('options'):
                cls.config[key] = value

        # Load the series
        cls.series = []
        for section in config.sections():
            if section != 'options':
                title = config.get(section, 'title')
                videos = config.get(section, 'videos')
                theTvDb = config.getint(section, 'theTvDb')
                lang = config.get(section, 'lang')
                cls.series.append([section, title, videos, theTvDb, lang])
Example #4
def main():
    parser = SafeConfigParser()
    parser.read('settings.ini')

    connection = Connection(parser.get('mongodb', 'server'))
    # look up the database attribute by name; exec() cannot rebind a local
    # variable inside a function in Python 3
    db = getattr(connection, parser.get('mongodb', 'db'))

    ai = mc(db, "markov", exp=1.2)

    deck = []

    for i in range(100):
        c = ai.get()
        if(isinstance(c, type(""))):
            deck.append(c)

    deck.sort()

    deck_clean = bag(lambda: 0)

    for c in deck:
        deck_clean[c] += 1

    for c in set(deck):
        print ("%2i  %s" % (deck_clean[c], c))
Example #5
class Main:
  config = None
  db     = None
  hosts  = []

  def __init__(self, config_path):

    self.config = SafeConfigParser(allow_no_value=False)
    self.config.read(config_path + os.sep + 'communicator.cfg')

    self.db = oursql.Connection(
                                host=self.config.get('database', 'host'),
                                port=self.config.getint('database', 'port'),
                                user=self.config.get('database', 'username'),
                                passwd=self.config.get('database', 'password'),
                                db=self.config.get('database', 'dbname'),
                                charset="utf8")

  def getHosts(self):
    with self.db.cursor() as cur:
      cur.execute(b'SELECT `id`, `name`, `address`, `port`, `key`, `uuid`' +
                  b' FROM `host` WHERE `active`', plain_query=True)

      for row in cur:
        self.hosts.append(Host(
                          row[0],
                          row[1],
                          (row[2], row[3]),
                          row[4],
                          row[5],
                          self.db)
                    )

  def run(self):
    self.getHosts()
Example #6
    def parseConfig (self):
        """Reads the config file (wormBait.ini) and initializes variables accordingly"""
        config = SafeConfigParser()
        config.read('wormBait.ini')

        configDbIds = ""
        configDegFile = ""
        configOutFile = ""
        
        if config.has_section('wormBait'):
            configDbIds = config.get('wormBait', 'dbIds')
            configDegFile = config.get('wormBait', 'degFile')
            configOutFile = config.get('wormBait', 'outFile')

        if configDbIds:
            self.entryList.writeln(configDbIds)
        else:
            self.entryList.writeln("Enter DB IDs here")

        if configDegFile:
            self.dbFilePath.set(configDegFile)
        else:
            self.dbFilePath.set('Enter path to input database file here')

        if configOutFile:
            self.outFilePath.set(configOutFile)
        else:
            self.outFilePath.set('Enter desired path to output CSV file here')
Example #7
def file_list(root):
    # read the root file, get its [config] section
    # and use it to construct the file list.
    conf = SafeConfigParser()
    conf.read(root)
    try:
        dirlist = conf.get("config", "path").replace(' ', '').split(',')
    except:
        dirlist = []
    try:
        files = conf.get("config", "files").replace(' ', '').split(',')
    except:
        files = []

    
    root = os.path.abspath(root)
    # all relative pathnames will be relative to the rootdir
    rootdir = os.path.dirname(root)
    flist = [root]
    dirlist = [os.path.abspath("%s/%s" % (rootdir, x)) if not os.path.isabs(x) else os.path.abspath(x) for x in dirlist]
    # insert the directory of the root file at the beginning
    dirlist.insert(0, rootdir)

    # import pdb; pdb.set_trace()
    for d in dirlist:
        for f in files:
            fnm = "%s/%s" % (d, f)
            if fnm in flist:
                continue
            if os.access(fnm, os.F_OK):
                fnmlist = file_list(fnm)
                flist += fnmlist
    return uniq(flist)
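file_list() above expects the root file to carry a [config] section whose path and files options are comma-separated lists. A hypothetical usage sketch (the file name and its contents are illustrative, not taken from the original project):

# root.cfg might look like:
#   [config]
#   path = subdir1, ../shared
#   files = extra.cfg, local.cfg
all_files = file_list("root.cfg")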
Example #8
class Config:

    def __init__(self, argParser, path=None):
        if not path:
            path = os.path.dirname(os.path.realpath(__file__))
        user = getpass.getuser()
        self._cfg = SafeConfigParser({
                                     'del_bin': 'rm',
                                     'ssh_port': '22',
                                     'ssh_timeout': '5',
                                     'ssh_user': user,
                                     'file_ttl': '1d',
                                     'file_perm': '0644',
                                     'file_group': user,
                                     'file_user': user,
                                     'rand_len': '2',
                                     },
                                     comment_prefixes=('#', ';'))
        self._cfg.read(os.path.join(path, 'shup.cfg'))
        self.config = 'default_config'
        self.args = argParser.__dict__

    def get(self, option, args=None):
        opt = None
        if option in self.args and self.args[option]:
            opt = self._cfg.get(self.args['rule'], option, raw=False,
                                vars={option: self.args[option]})
        else:
            opt = self._cfg.get(self.args['rule'], option, raw=False)
        return opt

    def askPasswd(self):
        return getpass.getpass()
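In Config.get() above, passing vars={option: ...} lets the command-line value override whatever the rule section holds, because the parser consults vars first, then the section, then the defaults. A small self-contained sketch of that lookup order (the section name and values are illustrative):

from configparser import ConfigParser

cfg = ConfigParser({'ssh_port': '22'})
cfg.read_string("[upload]\nssh_port = 2222\n")
print(cfg.get('upload', 'ssh_port'))                             # 2222 (section value)
print(cfg.get('upload', 'ssh_port', vars={'ssh_port': '8022'}))  # 8022 (vars wins)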
Example #9
 def readconfig(self):
     """Reads settings from cfg files"""
     config = SafeConfigParser(allow_no_value=True)
     config.read(self.jobspath+self.selected['name']+'.ini')
     self.selected['source'] = config.get('directories','source')
     self.selected['dest'] = config.get('directories','destination')
     self.selected['options'] = config.get('options', 'options')
     self.selected['sudo'] = config.get('options', 'sudo')
Example #10
def init():
	cont=False
	clear()

	credentials_from_file = False

	credentials = SafeConfigParser()
	credentials.read('settings.txt')
	if (credentials.has_option('main','email') 
		  and credentials.has_option('main','password')):
		credentials_from_file = True

	while(cont == False):
		driver.get('https://www.facebook.com/')
	
		if credentials_from_file:
			email = credentials.get('main', 'email')
			password = credentials.get('main', 'password')
		else:
			email=input('Email : ')
			password=getpass('Password : ')
		inputs = driver.find_elements_by_tag_name('input')
		inputs[1].send_keys(email)
		inputs[2].send_keys(password)
		driver.implicitly_wait(10)
		inputs[3].click()

		if str(driver.current_url).split('=')[0] == 'https://www.facebook.com/login.php?login_attempt':
			clear()
			print('Invalid Email/Password')
			if credentials_from_file:
				print('Switching to manual input')
				credentials_from_file = False
		else: 
			cont=True

	print('Loading...\n')
	profile=[x for x in driver.find_elements_by_tag_name('a') if x.get_attribute('title') == 'Profile'][0].get_attribute('href').split('/')[3]
	driver.get('https://www.facebook.com/messages/'+profile)
	
	global replyButton
	replyButton=[x for x in driver.find_elements_by_tag_name('input') if x.get_attribute('value') == 'Reply'][0]

	if not(replyButton.is_displayed()):
		driver.find_element_by_css_selector('._1s0').click()

	if os.path.isfile(os.getcwd()+'/commands.txt'):
		with open('commands.txt','r') as foo:
			for a in foo.read().split('\n'):
				ls=a.split(' ')
				if len(ls) >= 2:
					global customCommands
					customCommands[ls[0]]=' '.join(ls[1:])

	print('Ready!')
Example #11
 def from_cfg(self, filename):
     parser = SafeConfigParser()
     with open(filename) as fh:
         parser.readfp(fh)
     self.client_id = parser.get('credentials', 'client_id')
     self.client_secret = parser.get('credentials', 'client_secret')
     if parser.has_option('credentials', 'refresh_token'):
         token = parser.get('credentials', 'refresh_token').strip()
         if token:
             self.refresh_token = token
Example #12
def main():
    defaults = {'network': {'server': None, 'port': 6667, 'password': None}, \
                'bot': {'nick': 'Pyre', 'ident': 'pyro', 'realname': 'Pyre', 'usermode': '+iwx'}, \
                'ctcp': {'version': 'Pyre IRC', 'finger': 'Pyre IRC', 'userinfo': 'Pyre IRC'}}
    parser = SafeConfigParser(defaults)
    parser.read('config.ini')

    options = {}
    options['server'] = parser.get('network','server')
    options['port'] = parser.get('network','port')
    options['ssl'] = (options['port'][0] == '+')
    options['port'] = int(options['port'])
    options['password'] = parser.get('network', 'password')

    options['nick'] = parser.get('bot', 'nick')
    options['ident'] = parser.get('bot', 'ident')
    options['realname'] = parser.get('bot', 'realname')
    options['usermode'] = parser.get('bot', 'usermode')

    options['version'] = parser.get('ctcp', 'version')
    options['finger'] = parser.get('ctcp', 'finger')
    options['userinfo'] = parser.get('ctcp', 'userinfo')

    bot = Pyre(options)
    bot.connect()
    
    # Dumb CLI
    text = input()
    while(not text.startswith("QUIT") and not bot.error and bot.connected):
        bot.write(text)
        text = input()
    bot.write(text)
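Note that parser defaults are a single flat option-to-string mapping that backs every section (they populate the DEFAULT section); ConfigParser does not expand the nested per-section dictionary passed above. A flat sketch of the same idea, assuming the option names used later in this function:

from configparser import ConfigParser

flat_defaults = {'port': '6667', 'nick': 'Pyre', 'ident': 'pyro',
                 'realname': 'Pyre', 'usermode': '+iwx',
                 'version': 'Pyre IRC', 'finger': 'Pyre IRC', 'userinfo': 'Pyre IRC'}
parser = ConfigParser(flat_defaults)
parser.read('config.ini')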
Example #13
def test_pulp():
    tasker = DockerTasker()
    parsed_config = SafeConfigParser()
    assert len(parsed_config.read(PULP_CONF_PATH)) > 0

    host = parsed_config.get("server", "host")
    un = parsed_config.get("server", "username")
    pswd = parsed_config.get("server", "password")
    verify_ssl = parsed_config.getboolean("server", "verify_ssl")
    push_image_to_pulp("busybox-test", "busybox", host, un, pswd, verify_ssl,
                       tasker, logging.getLogger("dock.tests"))
Example #14
class BaseCloudProvider(CloudProvider):

    def __init__(self, config):
        self._config = BaseConfiguration(config)
        self._config_parser = SafeConfigParser()
        self._config_parser.read(CloudBridgeConfigLocations)

    @property
    def config(self):
        return self._config

    @property
    def name(self):
        return str(self.__class__.__name__)

    def has_service(self, service_type):
        """
        Checks whether this provider supports a given service.

        :type service_type: str or :class:``.CloudServiceType``
        :param service_type: Type of service to check support for.

        :rtype: bool
        :return: ``True`` if the service type is supported.
        """
        try:
            if getattr(self, service_type):
                return True
        except AttributeError:
            pass  # Undefined service type
        return False

    def _get_config_value(self, key, default_value):
        """
        A convenience method to extract a configuration value.

        :type key: str
        :param key: a field to look for in the ``self.config`` field

        :type default_value: anything
        :param default_value: the default value to return if a value for the
                              ``key`` is not available

        :return: a configuration value for the supplied ``key``
        """
        if isinstance(self.config, dict) and self.config.get(key):
            return self.config.get(key, default_value)
        elif hasattr(self.config, key) and getattr(self.config, key):
            return getattr(self.config, key)
        elif (self._config_parser.has_option(self.PROVIDER_ID, key) and
              self._config_parser.get(self.PROVIDER_ID, key)):
            return self._config_parser.get(self.PROVIDER_ID, key)
        return default_value
Example #15
def run():
    config = SafeConfigParser()
    config.read("config/defaults.cfg")
    config.read("~/.config/mcdu.cfg")
    config.read("config/mcdu.cfg")

    sim = config.get("General", "sim")
    if sim == "fsx":
        from mcdu.fsx import FSXReceiver
        receiver = FSXReceiver()
    elif sim == "xplane":
        from mcdu.xplane import XPlaneReceiver
        receiver = XPlaneReceiver()
    else:
        print("no simulator set");
        return 1

    receiver.start()

    api = ACARS_API(config.get("ACARS", "logon"))
    acars = ACARS(api)
    atc = ATC(api)

    mcdu = MCDU()
    mcdu.subsystem_register(acars)
    mcdu.subsystem_register(atc)
    mcdu.menu()

    application = tornado.web.Application([
        (r"^/socket", WebSocket, dict(mcdu=mcdu)),
        (r"^/(.*)$", tornado.web.StaticFileHandler, {"path": "res/", "default_filename": "index.html"}),
    ], debug=False)

    port = config.getint("General", "port")
    application.listen(port)

    try:
        print("running on port %i" % port)
        webbrowser.open_new("http://localhost:%i" % port)
        tornado.ioloop.IOLoop.instance().start()
    except KeyboardInterrupt:
        print("quitting...")
    except Exception as e:
        import traceback
        traceback.print_exc()
        print("quitting...")
    finally:
        receiver.stop()
        acars.stop()
        atc.stop()
        return 0
Example #16
def getConnectionInfo():
    config_filename = os.path.join(os.path.dirname(__file__), os.path.pardir, 'connection.ini')
    cp = SafeConfigParser()
    cp.read(config_filename)

    info = {
        'server_name': cp.get('server', 'name'),
        'server_ip': cp.get('server', 'ip'),
        'server_port': cp.getint('server', 'port'),
        'client_name': cp.get('client', 'name'),
        'user': cp.get('user', 'name'),
        'password': cp.get('user', 'password'),
    }
    return info
Example #17
def get_connection(database, hostname=None, port=None, login=None, password=None):
    parser = SafeConfigParser()
    filename = os.path.join(os.path.dirname(__file__), 'config.ini')
    parser.read(filename)
    if hostname is None:
        hostname = parser.get(database, 'host')
    if port is None:
        port = parser.getint(database, 'port')
    if login is None:
        login = parser.get(database, 'username')
    if password is None:
        password = parser.get(database, 'password')

    return openerplib.get_connection(hostname=hostname, port=port, database=database, login=login, password=password)
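get_connection() above falls back to a per-database section of config.ini for any argument that is not supplied. A hypothetical layout and call (the section name and values are illustrative):

# config.ini, next to this module:
#   [mydb]
#   host = localhost
#   port = 8069
#   username = admin
#   password = admin
connection = get_connection('mydb')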
Example #18
    def __init__(self, client_id, client_secret, access_token=None,
                 refresh_token=None):
        self.client_id = client_id
        self.client_secret = client_secret
        self.access_token = access_token
        self.refresh_token = refresh_token

        parser = SafeConfigParser()
        parser.read(os.path.dirname(os.path.abspath(__file__)) + '/config.ini')

        self.API_ROOT_URL = parser.get('config', 'api_root_url')
        self.SDK_VERSION = parser.get('config', 'sdk_version')
        self.AUTH_URL = parser.get('config', 'auth_url')
        self.OAUTH_URL = parser.get('config', 'oauth_url')
Example #19
def extract_ssi(control_fname):
    """Extract SSI from a PyTOPKAPI simulation file.

    Read a PyTOPKAPI simulation file and it's associated parameter
    file and compute the Soil Saturation Index (SSI) for each model
    cell and timestep. The results are returned as a Numpy array.

    Parameters
    ----------
    control_fname : string
        The file name of a PyTOPKAPI simulation control file. The name
        should contain the full path relative to the current
        directory.

    Returns
    -------
    ssi : Numpy ndarray
        A Numpy array containing the calculated SSI values.

    """
    config = SafeConfigParser()
    config.read(control_fname)

    global_param_fname = config.get('input_files', 'file_global_param')
    param_fname = config.get('input_files', 'file_cell_param')
    sim_fname = config.get('output_files', 'file_out')
    fac_L = config.getfloat('calib_params', 'fac_L')

    params = np.loadtxt(param_fname)
    glob_params = np.genfromtxt(global_param_fname, names=True)

    soil_depth = fac_L*params[:, 8]
    factor = params[:, 11] - params[:, 10]
    cell_area = glob_params['X']**2 # m^2

    soil_depth = ma.masked_values(soil_depth, 0.0)
    factor = ma.array(factor, mask=soil_depth.mask)
    div = factor*soil_depth*cell_area

    tkpi_file = h5py.File(sim_fname, 'r')
    soil_vol = tkpi_file['/Soil/V_s'][...]
    tkpi_file.close()

    # ssi = (Vs/cell_vol)*100
    # cell_vol = (theta_s - theta_r)*soil_depth*cell_area
    sv = ma.array(soil_vol, mask=soil_depth.mask)
    ssi = (sv/(div))*100.0

    return ssi
Example #20
def parse_config(FILENAME):
    scpr = SafeConfigParser()
    scpr.read(utils.fix_path(FILENAME))

    SMTP_SERVER  = scpr.get("smtp", "server")
    SMTP_PORT = scpr.get("smtp", "port")
    USERNAME = scpr.get('account', 'username')
    PASSWORD = scpr.get('account', 'password')
    CSVPATH = utils.fix_path(scpr.get("file", "mails_data"))

    return {'user': USERNAME,
            'passwd': PASSWORD,
            'server': SMTP_SERVER,
            'port': SMTP_PORT,
            'csv_path': CSVPATH}
Example #21
 def loadGamepad(self,_controllerName):
     pygame.joystick.init()
     controller_parser = SafeConfigParser()
     controller_parser.read(os.path.join(os.path.join(self.datadir.replace('main.exe',''),'settings'),'gamepads.ini'))
     if controller_parser.has_section(_controllerName):
         joystick = None
         for pad in range(pygame.joystick.get_count()):
             joy = pygame.joystick.Joystick(pad)
             if joy.get_name() == _controllerName:
                 joystick = joy
                 joystick.init()
         
         if joystick:
             jid = joystick.get_id()
         else:
             jid = None
         
         axes = {}
         buttons = {}
         for opt in controller_parser.options(_controllerName):
             if opt[0] == 'a':
                 axes[int(opt[1:])] = tuple(controller_parser.get(_controllerName, opt)[1:-1].split(','))
             elif opt[0] == 'b':
                 buttons[int(opt[1:])] = controller_parser.get(_controllerName, opt)
     
         pad_bindings = engine.controller.PadBindings(_controllerName,jid,axes,buttons)
         
         return engine.controller.GamepadController(pad_bindings)
     else:
         joystick = None
         for pad in range(pygame.joystick.get_count()):
             joy = pygame.joystick.Joystick(pad)
             if joy.get_name() == _controllerName:
                 joystick = joy
                 joystick.init()
         
         if joystick:
             jid = joystick.get_id()
         else:
             jid = None
         
         axes = dict()
         buttons = dict()
         
         pad_bindings = engine.controller.PadBindings(_controllerName,jid,axes,buttons)
         self.setting[joystick.get_name()] = pad_bindings
         
         return engine.controller.GamepadController(pad_bindings)
Example #22
def apply_user_settings(filepath, quiet=True):
    """
    Read and apply user settings in a configuration file.

    Parameters
    ----------
    filepath : str
        Path to the configuration file.
    quiet : bool, default True
        If True, no output will be written to standard output.
    """
    database = specifymodels.database
    if not quiet:
        print(_bold('applying user settings: '), end='')
    if not os.path.isfile(filepath):
        raise OSError('Invalid configuration file... ' + filepath)
    else:
        # Read config file
        config_parser = SafeConfigParser()
        config_parser.read(filepath)
        db_name = config_parser.get('MySQL', 'Database')
        db_host = config_parser.get('MySQL', 'Host')
        db_user = config_parser.get('MySQL', 'User')
        db_password = config_parser.get('MySQL', 'Password')
        collection = config_parser.get('Specify', 'CollectionName')
        specify_username = config_parser.get('Specify', 'User')
        # Initiate database
        database.init(
            database=db_name, host=db_host, user=db_user, passwd=db_password)
        collection_context = _get_collection_context(
            database=database, collection_name=collection)
        user_agentid = _get_user_agentid(
            database=database, divisionid=collection_context['divisionid'],
            specify_username=specify_username)
        specify_context = {'database': database, 'user_agentid': user_agentid}
        specify_context.update(collection_context)
        # Apply user settings
        TableDataset.specify_context = specify_context
        if not quiet:
            print(
                '{0}\n'
                '    database name: {1}\n'
                '    database host: {2}\n'
                '    collection:    {3}\n'
                '    Specify user:  {4}'
                .format(
                    os.path.abspath(filepath), repr(db_name), repr(db_host),
                    repr(collection), repr(specify_username)))
Example #23
class GitConfigParser():
    CORE = 'core'
    def __init__(self, branch):
        self.section = branch
        self.file = join(GIT_DIR, '.git', 'gitcc')
        self.parser = SafeConfigParser()
        self.parser.add_section(self.section)
    def set(self, name, value):
        self.parser.set(self.section, name, value)
    def read(self):
        self.parser.read(self.file)
    def write(self):
        self.parser.write(open(self.file, 'w'))
    def getCore(self, name, *args):
        return self._get(self.CORE, name, *args)
    def get(self, name, *args):
        return self._get(self.section, name, *args)
    def _get(self, section, name, default=None):
        if not self.parser.has_option(section, name):
            return default
        return self.parser.get(section, name)
    def getList(self, name, default=None):
        return self.get(name, default).split('|')
    def getInclude(self):
        return self.getCore('include', '.').split('|')
    def getBranches(self):
        return self.getList('branches', 'main')
    def getExtraBranches(self):
        return self.getList('_branches', 'main')
Example #24
def temporary_store_decorator(config_files_directory = default_config_files_directory, file_name = None):
    parser = SafeConfigParser()
    config_ini = os.path.join(config_files_directory, 'config.ini')
    read_config_file_name = parser.read([config_ini])
    tmp_directory = parser.get('data', 'tmp_directory')
    assert tmp_directory is not None, \
        'tmp_directory is not set: {!r} in {}'.format(tmp_directory, read_config_file_name)
    assert os.path.isabs(tmp_directory), \
        'tmp_directory should be an absolut path: {!r} in {}'.format(tmp_directory, read_config_file_name)
    if not os.path.isdir(tmp_directory):
        log.info('tmp_directory does not exist: {!r} in {}. Creating it.'.format(tmp_directory, read_config_file_name))
        os.makedirs(tmp_directory)

    assert file_name is not None
    if not file_name.endswith('.h5'):
        file_name = "{}.h5".format(file_name)
    file_path = os.path.join(tmp_directory, file_name)

    def actual_decorator(func):
        def func_wrapper(*args, **kwargs):
            temporary_store = HDFStore(file_path)
            try:
                return func(*args, temporary_store = temporary_store, **kwargs)
            finally:
                gc.collect()
                temporary_store.close()

        return func_wrapper

    return actual_decorator
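A hypothetical usage sketch for temporary_store_decorator() above: the decorated function receives an open HDFStore as its temporary_store keyword argument and the store is closed for it afterwards (the file name 'survey' and the stored frame are illustrative):

import pandas as pd

@temporary_store_decorator(file_name = 'survey')
def build_survey(temporary_store = None):
    # ends up in <tmp_directory>/survey.h5, with tmp_directory taken from config.ini
    temporary_store.put('frame', pd.DataFrame({'x': [1, 2, 3]}))

build_survey()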
Example #25
class Config(object):
    """
	The configuration object for a link to a database.
    """
    def __init__(self, section=None, config_file=None):
        self.parser = SafeConfigParser()  # allow_no_value=True

        default_file = os.path.expanduser('~/.EMIS/pyEMIS.ini')
        if not config_file:
            config_file = default_file
        self.config_file = config_file

        if not section:
            section = 'DEFAULT'
        self.section = section

        read_files = self.parser.read(self.config_file)

        if not read_files:
            self.parser.add_section(self.section)
            with open(default_file, 'wb') as configfile:
                self.parser.write(configfile)

        if not self.parser.has_section(self.section):
            raise ConfigurationFileError('Section [%s] not found in configuration file (sections = %s).' % (self.section, self.parser.sections()), self.config_file)

    def __getattr__(self, name):
        try:
            return self.parser.get(self.section, name)
        except NoSectionError as e:
            raise ConfigurationFileError('Section [%s] not found in configuration file (sections = %s).' % (self.section, self.parser.sections()), self.config_file)
        except NoOptionError as e:
            raise ConfigurationFileError('Option "%s = ..." not in section [%s] of configuration file (options = %s).' % (name, self.section, self.parser.options(self.section)), self.config_file)
Example #26
def getConfPart(name, section='main'):
    parser = SafeConfigParser()
    # Get absolute dir for config file
    configLocation = __file__.replace("app/helpers/getConfig.py","config.ini")

    parser.read(configLocation)
    return parser.get(section, name)
Example #27
def main(ctx):

  out_files = [open(join(ctx.DIR, "%s.log" % c), 'w') for c in columns]
  for i in range(len(columns)):
    out_files[i].write("#LABEL:%s\n" % columns[i])

  with open(join(ctx.DIR, 'hist.csv'), 'r') as csv:
    csv.readline()
    for line in csv:
      vs = line.split(', ')
      for i in range(len(columns)):
        out_files[i].write("%d %s\n" % (int(vs[0]), vs[i+1].rstrip()))

  chart_type = "xy"
  cp = SafeConfigParser(allow_no_value=True)
  cp.read(ctx.job_file)
  for s in cp.sections():
    try:
      epoch = cp.get(s, 'log_unix_epoch')
      chart_type = "timeseries"
    except:
      pass
  print ("Chart Type: %s" % chart_type)
  with open(join(ctx.DIR, 'results.html'), 'w') as fp:
    fp.write(html % (chart_type,))
Example #28
def read_config(filename):
    config = SafeConfigParser()
    config.readfp(open(filename))
    for section in config.sections():
        if section == 'extensions':
            for (extension, extractor) in config.items(section):
                _register_extension(extension, extractor)
        elif section.startswith('extractor:'):
            extractor = section[10:]
            if extractor not in EXTRACTORS:
                print('Unknown extractor %s. '
                      'Check --list-extractors for available options' % extractor,
                        file=sys.stderr)
                sys.exit(1)
            extractor_config = dict(config.items(section))
            EXTRACTORS[extractor].update_config(**extractor_config)
        elif section.startswith('extension'):
            print('Use of %s section is obsolete. '
                  'Please use the "extensions" section.' % section,
                  file=sys.stderr)
            extension = section[10:]
            plugin = config.get(section, 'plugin')
            if not plugin:
                print('No plugin defined for extension %s' % extension,
                    file=sys.stderr)
            _register_extension(extension, plugin)
Example #29
def read_conf():
    parser = SafeConfigParser()
    parser.read(conf_file)
    try:
        addr = parser.get(SESSION_NAME, 'addr')
    except NoOptionError:
        addr = 'localhost'
    try:
        port = parser.get(SESSION_NAME, 'port')
    except NoOptionError:
        port = 8080
    try:
        password = parser.get(SESSION_NAME, 'password')
    except NoOptionError:
        password = None
    return addr, port, password
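On Python 3 the three try/except blocks above can also be written with the fallback keyword of get(), which returns the given value when the option (or section) is missing; a shorter equivalent sketch:

addr = parser.get(SESSION_NAME, 'addr', fallback='localhost')
port = parser.get(SESSION_NAME, 'port', fallback=8080)
password = parser.get(SESSION_NAME, 'password', fallback=None)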
Example #30
def load_setup_connection_string(section):
    """
    Attempts to read the default connection string from the setup.cfg file.

    If the file does not exist or if it exists but does not contain the connection string, None is returned.  If the
    file exists but cannot be parsed, an exception is raised.
    """
    from os.path import exists, join, dirname, abspath, splitext, basename
    from configparser import SafeConfigParser

    FILENAME = "setup.cfg"
    KEY = "connection-string"

    path = join(dirname(dirname(abspath(__file__))), "tmp", FILENAME)

    if exists(path):
        try:
            p = SafeConfigParser()
            p.read(path)
        except:
            raise SystemExit("Unable to parse %s: %s" % (path, sys.exc_info()[1]))

        if p.has_option(section, KEY):
            return p.get(section, KEY)

    return None
Example #31

def getFisher(bigDataDir, expName, gridName, calName, saveName, version):
    saveId = expName + "_" + gridName + "_" + calName + "_v" + version

    paramList, FisherTot = pickle.load(
        open(bigDataDir + "savedS8Fisher_" + saveId + "_" + saveName + ".pkl",
             'rb'))
    return FisherTot


iniFile = "input/pipeline.ini"
Config = SafeConfigParser()
Config.optionxform = str
Config.read(iniFile)
bigDataDir = Config.get('general', 'bigDataDirectory')

fishSection = "lcdm"
noatm = ""
#noatm = "-noatm"
cal = "CMB_all"
#cal = "owl2"
#derivSet = "0.3_ysig_0.127"
derivSet = "0.5"
gridName = "grid-default"
#gridName = "grid-1.2"

cosmoFisher = Config.get('fisher-' + fishSection, 'saveSuffix')
origParams = Config.get('fisher-' + fishSection, 'paramList').split(',')
"""RES STUDY"""
#cmbfisher3 = getFisher(bigDataDir,"S4-3.0-0.4"+noatm,gridName,cal,cosmoFisher,derivSet)
Example #32
        # self.check_tree(results)

    def check_tree(self):
        """Validate that jobs executed after their dependencies."""
        for node in self.G:
            started = self.results[node].metadata.started
            for parent in self.G.predecessors(node):
                finished = self.results[parent].metadata.completed
                assert started > finished, "{} should have happened after {}".format(
                    node, parent)        

            
if __name__ == '__main__':
    parser.read('parameters.ini')
    DBNAME   = parser.get("redshift", "dbname")
    HOST     = parser.get("redshift", "host")
    PASSWORD = parser.get("redshift", "password")
    PORT     = parser.get("redshift", "port")
    USER     = parser.get("redshift", "user")
    connector = open_connection(DBNAME, USER, HOST, PORT, PASSWORD)

    if False:
        query = "SELECT * from apps LIMIT 5 ;"
        run_query_py(query, connector)
        
    if True:
        pass


    close_connection(connector)    
Example #33
confFile = os.path.join(confDir, "gedit-markdown.ini")

parser = SafeConfigParser()
parser.optionxform = str
parser.add_section("markdown-preview")
parser.set("markdown-preview", "externalBrowser", markdownExternalBrowser)
parser.set("markdown-preview", "panel", markdownPanel)
parser.set("markdown-preview", "shortcut", markdownShortcut)
parser.set("markdown-preview", "version", markdownVersion)
parser.set("markdown-preview", "visibility", markdownVisibility)
parser.set("markdown-preview", "visibilityShortcut",
           markdownVisibilityShortcut)

if os.path.isfile(confFile):
    parser.read(confFile)
    markdownExternalBrowser = parser.get("markdown-preview", "externalBrowser")
    markdownPanel = parser.get("markdown-preview", "panel")
    markdownShortcut = parser.get("markdown-preview", "shortcut")
    markdownVersion = parser.get("markdown-preview", "version")
    markdownVisibility = parser.get("markdown-preview", "visibility")
    markdownVisibilityShortcut = parser.get("markdown-preview",
                                            "visibilityShortcut")

if not os.path.exists(confDir):
    os.makedirs(confDir)

with open(confFile, "w") as confFile:
    parser.write(confFile)


class MarkdownPreviewPlugin(GObject.Object, Gedit.WindowActivatable):
Example #34
def main():
    try:
        locale.setlocale(locale.LC_ALL)
    except:
        pass

    path = os.path.expanduser('~/.mocpscrob/')
    configpath = path + 'config'
    cachepath = path + 'cache'
    pidfile = path + 'pid'
    logfile = path + 'scrobbler.log'
    hostname = 'post.audioscrobbler.com'
    exit_code = 0

    if not os.path.isdir(path):
        os.mkdir(path)

    shortargs = 'dc:ovqhk'
    longargs = 'daemon config= offline verbose quiet help kill'
    try:
        opts, args = getopt.getopt(sys.argv[1:], shortargs, longargs.split())
    except getopt.error as e:
        print(str(e), file=sys.stderr)
        print('Use --help parameter to get more info', file=sys.stderr)
        return

    daemon = False
    verbose = False
    quiet = False
    offline = False
    kill = False

    for o, v in opts:
        if o in ('-h', '--help'):
            print(
                'mocp-scrobbler.py 0.2',
                'Usage:',
                '  mocp-scrobbler.py [--daemon] [--offline] [--verbose | --quiet] [--config=FILE]',
                '  mocp-scrobbler.py --kill [--verbose | --quiet]',
                '',
                '  -c, --config=FILE  Use this file instead of default config',
                '  -d, --daemon       Run in background, messages will be written to log file',
                '  -k, --kill         Kill existing scrobbler instance and exit',
                '  -o, --offline      Don\'t connect to service, put everything in cache',
                '  -q, --quiet        Write only errors to console/log',
                '  -v, --verbose      Write more messages to console/log',
                sep='\n')
            return 1
        if o in ('-d', '--daemon'):
            daemon = True
        if o in ('-o', '--offline'):
            offline = True
        if o in ('-v', '--verbose'):
            verbose = True
            quiet = False
        if o in ('-q', '--quiet'):
            quiet = True
            verbose = False
        if o in ('-k', '--kill'):
            kill = True
        if o in ('-c', '--config'):
            configfile = v

    if os.path.isfile(pidfile):
        if kill:
            if not quiet:
                print('Attempting to kill existing scrobbler process...')
        else:
            print(
                'Pidfile found! Attempting to kill existing scrobbler process...',
                file=sys.stderr)
        try:
            with open(pidfile) as f:
                pid = int(f.read().strip())
            os.kill(pid, signal.SIGTERM)
            time.sleep(1)
        except (OSError, ValueError) as e:
            os.remove(pidfile)
        except IOError as e:
            print(
                'Error occured while reading pidfile. Check if process is really running, delete pidfile ("%s") and try again. Error was: "%s"'
                % (pidfile, str(e)),
                file=sys.stderr)
            return 1
    elif kill:
        if not quiet:
            print('Pidfile not found.')

    if os.path.isfile(pidfile):
        print('Waiting for existing process to end...')
        while os.path.isfile(pidfile):
            time.sleep(1)

    if kill: return

    config = SafeConfigParser()

    try:
        config.read(configpath)
    except:
        print('Not configured. Edit file: %s' % configpath, file=sys.stderr)
        return 1

    getter = lambda k, f: config.get('scrobbler', k) if config.has_option(
        'scrobbler', k) else f

    login = getter('login', None)
    password = getter('password', None)
    password_md5 = getter('password_md5', None)
    streams = getter('streams', '1').lower() in ('true', '1', 'yes')
    hostname = getter('hostname', hostname)

    if not login:
        print('Missing login. Edit file: %s' % configpath, file=sys.stderr)
        return 1

    if not (password or password_md5):
        print('Missing password. Edit file: %s' % configpath, file=sys.stderr)
        return 1

    if password:
        password_md5 = md5(password.encode('utf-8')).hexdigest()
        config.set('scrobbler', 'password_md5', password_md5)
        config.remove_option('scrobbler', 'password')
        with open(configpath, 'w') as f:
            config.write(f)
        print('Your password wasn\'t hashed - config file has been updated')

    del password

    forked = False
    if daemon:
        try:
            pid = os.fork()
            if pid:
                if not quiet:
                    print('Scrobbler daemon started with pid %d' % pid)
                sys.exit(0)
            forked = True
        except Exception as e:
            print(
                'Could not daemonize, scrobbler will run in foreground. Error was: "%s"'
                % str(e),
                file=sys.stderr)

    if verbose:
        log.setLevel(logging.DEBUG)
    elif quiet:
        log.setLevel(logging.WARNING)

    try:
        with open(pidfile, 'w') as f:
            f.write(str(os.getpid()))
    except Exception as e:
        print('Can\'t write to pidfile, exiting. Error was: "%s"' % str(e),
              file=sys.stderr)
        return 1

    if forked:
        try:
            lout = StupidFileHandler(logfile, 'w')
        except:
            try:
                logfile = os.getenv('TEMP', '/tmp/') + 'mocp-pyscrobbler.log'
                lout = StupidFileHandler(logfile, 'wa')
            except:
                lout = NullHandler()
        formatter = logging.Formatter('%(levelname)s %(asctime)s %(message)s')
        lout.setFormatter(formatter)
        log.addHandler(lout)
    else:
        lout = StupidStreamHandler(sys.stdout)
        log.addHandler(lout)

    lastfm = Scrobbler(hostname, login, password_md5)

    if os.path.isfile(cachepath):
        cache = None

        try:
            with open(cachepath, 'rb') as f:
                cache = pickle.load(f)
        except Exception as e:
            log.exception('Error while trying to read scrobbling cache:')

        if cache and isinstance(cache, list):
            lastfm.cache = cache

        try:
            os.remove(cachepath)
        except:
            pass

    if not offline:
        lastfm.start()

    unscrobbled = True
    unnotified = True

    newtrack = None
    oldtrack = None

    maxsec = 0
    lasttime = 0

    running = True

    def handler(i, j):
        nonlocal running
        log.info('Got signal, shutting down...')
        running = False
        signal.signal(signal.SIGQUIT, signal.SIG_IGN)
        signal.signal(signal.SIGTERM, signal.SIG_IGN)

    #signal.signal(signal.SIGINT, handler)
    signal.signal(signal.SIGQUIT, handler)
    signal.signal(signal.SIGTERM, handler)

    try:
        while running:
            newtrack, state = get_mocp()
            if (state == 'play' and newtrack) or (state == 'stop'
                                                  and oldtrack):
                if newtrack and (not lasttime) and (not newtrack.length):
                    lasttime = newtrack.position

                a = (newtrack != oldtrack) or state == 'stop'
                b = (not a) and newtrack.length and (
                    newtrack.length - 15 < maxsec) and (newtrack.position < 15)
                if a or b:
                    if oldtrack:
                        oldtrack.position = maxsec

                        toscrobble = False
                        if oldtrack.length:
                            toscrobble = (oldtrack.position > 240) or (
                                oldtrack.position > oldtrack.length * 0.5)
                        else:
                            toscrobble = (oldtrack.position - lasttime > 60)

                        if unscrobbled and toscrobble:
                            if state == 'stop':
                                log.info('Scrobbling [on stop]')
                            else:
                                log.info('Scrobbling [on change]')
                            lastfm.scrobble(oldtrack, not oldtrack.length)

                    if newtrack:
                        if not newtrack.length:
                            log.info('Now playing (stream): %s' % newtrack)
                        elif b:
                            log.info('Now playing (repeated): %s' % newtrack)
                        else:
                            log.info('Now playing: %s' % newtrack)

                    if state != 'stop':
                        oldtrack = newtrack
                    else:
                        oldtrack = None
                    unscrobbled = True
                    unnotified = True
                    maxsec = 0
                    if not newtrack.length:
                        lasttime = newtrack.position
                    else:
                        lasttime = 0

                maxsec = max(maxsec, newtrack.position)

                if newtrack and unnotified:
                    lastfm.notify(newtrack)
                    unnotified = False

                if newtrack and unscrobbled and newtrack.length >= 30 and (
                        newtrack.position > newtrack.length * _SCROB_FRAC):
                    log.info('Scrobbling [on %d%%]' % int(_SCROB_FRAC * 100))
                    lastfm.scrobble(newtrack)
                    unscrobbled = False

            time.sleep(5)
    except KeyboardInterrupt:
        log.info('Keyboard interrupt. Please wait until I shut down')
    except Exception:
        log.exception('An error occured:')
        exit_code = 1

    if not offline:
        lastfm.stop()
        if lastfm.isAlive():
            lastfm.join()

    if lastfm.cache:
        try:
            with open(cachepath, 'wb') as f:
                pickle.dump(lastfm.cache, f, pickle.HIGHEST_PROTOCOL)
        except:
            log.exception('Error while trying to save scrobbling cache:')

    try:
        os.remove(pidfile)
    except:
        pass

    return exit_code
Example #35
#!/usr/bin/env python

from configparser import SafeConfigParser

config = SafeConfigParser()
config.read('config.ini')

# Read config.ini and store into variables
HOST = config.get('app', 'HOST')
PORT = int(config.get('app', 'PORT'))
DEBUG = config.get('app', 'DEBUG')

OPENHUBUSER = config.get('openhub', 'OPENHUBUSER')
OPENHUBPASS = config.get('openhub', 'OPENHUBPASS')
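get() always returns strings, so PORT needs the manual int() above and DEBUG stays a string; the typed accessors do the conversion in one step (a small sketch, assuming DEBUG holds a boolean-like value such as true/false):

PORT = config.getint('app', 'PORT')
DEBUG = config.getboolean('app', 'DEBUG')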
Example #36
def JSONSchemaToC11(argv):
    parser = argparse.ArgumentParser(
        description=u'Generate c11 code by json schema.')
    parser.add_argument(u'configFile',
                        metavar=u'config_file',
                        type=open,
                        help=u'The configuration file')
    args = parser.parse_args(argv)

    if not args.configFile:
        return (1, u'Can\'t find the configuration file')

    os.environ['CD'] = os.path.dirname(os.path.abspath(args.configFile.name))

    config = SafeConfigParser(os.environ)
    config.readfp(args.configFile)

    schema_directory = config.get(u'glTF', u'schema_directory')
    code_file_name = config.get(u'glTF', u'code_file_name')
    extensions_schema_directories = []
    for extensions_schema_directory in config.get(
            u'glTF', u'extensions_schema_directories').split(u','):
        extensions_schema_directories.append(
            extensions_schema_directory.replace(u'\n', u''))

    major_version = config.get(u'glTF', u'major_version')
    minor_version = config.get(u'glTF', u'minor_version')
    patch_version = config.get(u'glTF', u'patch_version')

    output_header_path = config.get(u'output', u'output_header_path')
    output_source_path = config.get(u'output', u'output_source_path')
    namespace = config.get(u'output', u'namespace')

    if output_header_path != None and not os.path.exists(output_header_path):
        return (1, u'Invalid output header path')
    if output_source_path != None and not os.path.exists(output_source_path):
        return (1, u'Invalid output source path')
    if output_header_path == None:
        output_header_path = u'./'
    if output_source_path == None:
        output_source_path = u'./'

    c11_type_library = C11TypeLibrary()
    c11_type_library.setVersion(major_version, minor_version, patch_version)
    (error_code, error_message) = c11_type_library.addSchemaDirectory(
        schema_directory, config)
    if error_code != 0:
        return (error_code, error_message)
    for extensions_schema_directory in extensions_schema_directories:
        (error_code, error_message) = c11_type_library.addSchemaDirectory(
            extensions_schema_directory, config)
        if error_code != 0:
            print(error_code, error_message)
            return (error_code, error_message)
    (error_code, error_message) = c11_type_library.preprocess()
    if error_code != 0:
        return (error_code, error_message)
    (error_code, error_message) = c11_type_library.generate(
        code_file_name,
        outputHeaderPath=output_header_path,
        outputSourcePath=output_source_path,
        nameSpace=namespace,
        config=config)
    if error_code != 0:
        return (error_code, error_message)
    return (0, u'')
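Passing os.environ to the parser above makes every environment variable available as a DEFAULT-section value, presumably so options in the configuration file can interpolate values such as %(CD)s set a few lines earlier. A minimal self-contained sketch of the same trick (the section and option names are illustrative):

import os
from configparser import ConfigParser

os.environ['CD'] = '/tmp/project'
cfg = ConfigParser(os.environ)   # environment variables become DEFAULT-section values
cfg.read_string("[glTF]\nschema_directory = %(CD)s/schema\n")
print(cfg.get('glTF', 'schema_directory'))   # -> /tmp/project/schema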
Example #37
class Mainwindow(QtGui.QWidget):

    def __init__(self):
        # super(Mainwindow, self).__init__()
        QtGui.QMainWindow.__init__(self)
        print("Initializing..........")

        self.home = os.path.expanduser("~")
        # Reading all variables from config.ini
        self.parser = SafeConfigParser()
        self.parser.read(
            os.path.join(self.home, os.path.join('.nghdl', 'config.ini'))
        )
        self.ngspice_home = self.parser.get('NGSPICE', 'NGSPICE_HOME')
        self.release_dir = self.parser.get('NGSPICE', 'RELEASE')
        self.src_home = self.parser.get('SRC', 'SRC_HOME')
        self.licensefile = self.parser.get('SRC', 'LICENSE')
        # Printing LICENCE file on terminal
        fileopen = open(self.licensefile, 'r')
        print(fileopen.read())
        fileopen.close()
        self.file_list = []       # to keep the supporting files
        self.errorFlag = False    # to keep the check of "make install" errors
        self.initUI()

    def initUI(self):
        self.uploadbtn = QtGui.QPushButton('Upload')
        self.uploadbtn.clicked.connect(self.uploadModel)
        self.exitbtn = QtGui.QPushButton('Exit')
        self.exitbtn.clicked.connect(self.closeWindow)
        self.browsebtn = QtGui.QPushButton('Browse')
        self.browsebtn.clicked.connect(self.browseFile)
        self.addbtn = QtGui.QPushButton('Add Files')
        self.addbtn.clicked.connect(self.addFiles)
        self.removebtn = QtGui.QPushButton('Remove Files')
        self.removebtn.clicked.connect(self.removeFiles)
        self.ledit = QtGui.QLineEdit(self)
        self.sedit = QtGui.QTextEdit(self)
        self.process = QtCore.QProcess(self)
        self.termedit = QtGui.QTextEdit(self)
        self.termedit.setReadOnly(1)
        pal = QtGui.QPalette()
        bgc = QtGui.QColor(0, 0, 0)
        pal.setColor(QtGui.QPalette.Base, bgc)
        self.termedit.setPalette(pal)
        self.termedit.setStyleSheet("QTextEdit {color:white}")

        # Creating gridlayout
        grid = QtGui.QGridLayout()
        grid.setSpacing(5)
        grid.addWidget(self.ledit, 1, 0)
        grid.addWidget(self.browsebtn, 1, 1)
        grid.addWidget(self.sedit, 2, 0, 4, 1)
        grid.addWidget(self.addbtn, 2, 1)
        grid.addWidget(self.removebtn, 3, 1)
        grid.addWidget(self.termedit, 6, 0, 10, 1)
        grid.addWidget(self.uploadbtn, 17, 0)
        grid.addWidget(self.exitbtn, 17, 1)

        self.setLayout(grid)
        self.setGeometry(300, 300, 600, 600)
        self.setWindowTitle("Ngspice Digital Model Creator")
        # self.setWindowIcon(QtGui.QIcon('logo.png'))
        self.show()

    def closeWindow(self):
        try:
            self.process.close()
        except BaseException:
            pass
        print("Close button clicked")
        sys.exit()

    def browseFile(self):
        print("Browse button clicked")
        self.filename = QtGui.QFileDialog.getOpenFileName(
            self, 'Open File', '.')
        self.ledit.setText(self.filename)
        print("Vhdl file uploaded to process :", self.filename)

    def addFiles(self):
        print("Starts adding supporting files")
        title = self.addbtn.text()
        for file in QtGui.QFileDialog.getOpenFileNames(self, title):
            self.sedit.append(str(file))
            self.file_list.append(file)
        print("Supporting Files are :", self.file_list)

    def removeFiles(self):
        self.fileRemover = FileRemover(self)

    # Check extensions of all supporting files
    def checkSupportFiles(self):
        nonvhdl_count = 0
        for file in self.file_list:
            extension = os.path.splitext(str(file))[1]
            if extension != ".vhdl":
                nonvhdl_count += 1
                self.file_list.remove(file)

        if nonvhdl_count > 0:
            QtGui.QMessageBox.critical(
                self, 'Critical', '''<b>Important Message.</b>
                <br/><br/>Supporting files should be <b>.vhdl</b> file '''
            )

    def createModelDirectory(self):
        print("Create Model Directory Called")
        self.digital_home = self.parser.get('NGSPICE', 'DIGITAL_MODEL')
        os.chdir(self.digital_home)
        print("Current Working Directory Changed to", os.getcwd())
        self.modelname = os.path.basename(str(self.filename)).split('.')[0]
        print("Model to be created :", self.modelname)
        # Looking if model directory is present or not
        if os.path.isdir(self.modelname):
            print("Model Already present")
            ret = QtGui.QMessageBox.warning(
                self, "Warning",
                "<b>This model already exist. Do you want to " +
                "overwrite it?</b><br/> If yes press ok, else cancel it and " +
                "change the name of your vhdl file.",
                QtGui.QMessageBox.Ok, QtGui.QMessageBox.Cancel
            )
            if ret == QtGui.QMessageBox.Ok:
                print("Overwriting existing model " + self.modelname)
                if os.name == 'nt':
                    cmd = "rmdir " + self.modelname + "/s /q"
                else:
                    cmd = "rm -rf " + self.modelname
                # process = subprocess.Popen(
                #     cmd, stdout=subprocess.PIPE,
                #     stderr=subprocess.PIPE, shell=True
                # )
                subprocess.call(cmd, shell=True)
                os.mkdir(self.modelname)
            else:
                print("Exiting application")
                sys.exit()
        else:
            print("Creating model " + self.modelname + " directory")
            os.mkdir(self.modelname)

    def addingModelInModpath(self):
        print("Adding Model " + self.modelname +
              " in Modpath file " + self.digital_home)
        # Adding name of model in the modpath file
        # Check if the string is already in the file
        with open(self.digital_home + "/modpath.lst", 'r+') as f:
            flag = 0
            for line in f:
                if line.strip() == self.modelname:
                    print("Found model "+self.modelname+" in the modpath.lst")
                    flag = 1
                    break

            if flag == 0:
                print("Adding model name "+self.modelname+" into modpath.lst")
                f.write(self.modelname + "\n")
            else:
                print("Model name is already into modpath.lst")

    def createModelFiles(self):
        print("Create Model Files Called")
        os.chdir(self.cur_dir)
        print("Current Working directory changed to " + self.cur_dir)

        # Generate model corresponding to the uploaded VHDL file
        model = ModelGeneration(str(self.ledit.text()))
        model.readPortInfo()
        model.createCfuncModFile()
        model.createIfSpecFile()
        model.createTestbench()
        model.createServerScript()
        model.createSockScript()

        # Moving file to model directory
        path = os.path.join(self.digital_home, self.modelname)
        shutil.move("cfunc.mod", path)
        shutil.move("ifspec.ifs", path)

        # Creating directory inside model directoy
        print("Creating DUT directory at " + os.path.join(path, "DUTghdl"))
        os.mkdir(path + "/DUTghdl/")
        print("Copying required file to DUTghdl directory")
        shutil.move("connection_info.txt", path + "/DUTghdl/")
        shutil.move("start_server.sh", path + "/DUTghdl/")
        shutil.move("sock_pkg_create.sh", path + "/DUTghdl/")
        shutil.move(self.modelname + "_tb.vhdl", path + "/DUTghdl/")

        shutil.copy(str(self.filename), path + "/DUTghdl/")
        shutil.copy(os.path.join(self.home, self.src_home) +
                    "/src/ghdlserver/compile.sh", path + "/DUTghdl/")
        shutil.copy(os.path.join(self.home, self.src_home) +
                    "/src/ghdlserver/uthash.h", path + "/DUTghdl/")
        shutil.copy(os.path.join(self.home, self.src_home) +
                    "/src/ghdlserver/ghdlserver.c", path + "/DUTghdl/")
        shutil.copy(os.path.join(self.home, self.src_home) +
                    "/src/ghdlserver/ghdlserver.h", path + "/DUTghdl/")
        shutil.copy(os.path.join(self.home, self.src_home) +
                    "/src/ghdlserver/Utility_Package.vhdl", path + "/DUTghdl/")
        shutil.copy(os.path.join(self.home, self.src_home) +
                    "/src/ghdlserver/Vhpi_Package.vhdl", path + "/DUTghdl/")

        if os.name == 'nt':
            shutil.copy(os.path.join(self.home, self.src_home) +
                        "/src/ghdlserver/libws2_32.a", path + "/DUTghdl/")

        for file in self.file_list:
            shutil.copy(str(file), path + "/DUTghdl/")

        os.chdir(path + "/DUTghdl")
        if os.name == 'nt':
            # path to msys bin directory where bash is located
            self.msys_bin = self.parser.get('COMPILER', 'MSYS_HOME')
            subprocess.call(self.msys_bin+"/bash.exe " +
                            path + "/DUTghdl/compile.sh", shell=True)
            subprocess.call(self.msys_bin+"/bash.exe -c " +
                            "'chmod a+x start_server.sh'", shell=True)
            subprocess.call(self.msys_bin+"/bash.exe -c " +
                            "'chmod a+x sock_pkg_create.sh'", shell=True)
        else:
            subprocess.call("bash " + path + "/DUTghdl/compile.sh", shell=True)
            subprocess.call("chmod a+x start_server.sh", shell=True)
            subprocess.call("chmod a+x sock_pkg_create.sh", shell=True)

        os.remove("compile.sh")
        os.remove("ghdlserver.c")

    # Slot to redirect stdout and stderr to window console
    @QtCore.pyqtSlot()
    def readAllStandard(self):
        self.termedit.append(
            str(self.process.readAllStandardOutput().data(), encoding='utf-8')
        )
        stderror = str(
            self.process.readAllStandardError().data(), encoding='utf-8'
        )
        if "ERROR" in stderror.upper():
            self.errorFlag = True
        self.termedit.append(stderror)

    def runMake(self):
        print("run Make Called")
        self.release_home = self.parser.get('NGSPICE', 'RELEASE')
        path_icm = os.path.join(self.release_home, "src/xspice/icm")
        os.chdir(path_icm)

        try:
            if os.name == 'nt':
                # path to msys bin directory where make is located
                self.msys_bin = self.parser.get('COMPILER', 'MSYS_HOME')
                cmd = self.msys_bin+"\\make.exe"
            else:
                cmd = " make"

            print("Running Make command in " + path_icm)
            path = os.getcwd()  # noqa
            self.process = QtCore.QProcess(self)
            self.process.start(cmd)
            print("make command process pid ---------- >", self.process.pid())
        except BaseException:
            print("There is error in 'make' ")
            sys.exit()

    def runMakeInstall(self):
        print("run Make Install Called")
        try:
            if os.name == 'nt':
                self.msys_bin = self.parser.get('COMPILER', 'MSYS_HOME')
                cmd = self.msys_bin+"\\make.exe install"
            else:
                cmd = " make install"
            print("Running Make Install")
            path = os.getcwd()  # noqa
            try:
                self.process.close()
            except BaseException:
                pass

            self.process = QtCore.QProcess(self)
            self.process.start(cmd)
            self.process.finished.connect(self.createSchematicLib)
            self.process.readyReadStandardOutput.connect(self.readAllStandard)
            os.chdir(self.cur_dir)

        except BaseException:
            print("There is error in 'make install' ")
            sys.exit()

    def createSchematicLib(self):
        if Appconfig.esimFlag == 1:
            if not self.errorFlag:
                print('Creating library files................................')
                schematicLib = AutoSchematic(self.modelname)
                schematicLib.createKicadLibrary()
            else:
                QtGui.QMessageBox.critical(
                    self, 'Error', '''Cannot create Schematic Library of ''' +
                    '''your model. Resolve the <b>errors</b> shown on ''' +
                    '''console of NGHDL window. '''
                )
        else:
            QtGui.QMessageBox.information(
                self, 'Message', '''<b>Important Message</b><br/><br/>''' +
                '''To create Schematic Library of your model, ''' +
                '''use NGHDL through <b>eSim</b> '''
            )

    def uploadModel(self):
        print("Upload button clicked")
        try:
            self.process.close()
        except BaseException:
            pass
        try:
            self.file_extension = os.path.splitext(str(self.filename))[1]
            print("Uploaded File extension :" + self.file_extension)
            self.cur_dir = os.getcwd()
            print("Current Working Directory :" + self.cur_dir)
            self.checkSupportFiles()
            if self.file_extension == ".vhdl":
                self.errorFlag = False
                self.createModelDirectory()
                self.addingModelInModpath()
                self.createModelFiles()
                self.runMake()
                self.runMakeInstall()
            else:
                QtGui.QMessageBox.information(
                    self, 'Message', '''<b>Important Message.</b><br/>''' +
                    '''<br/>This accepts only <b>.vhdl</b> file '''
                )
        except Exception as e:
            QtGui.QMessageBox.critical(self, 'Error', str(e))
Exemple #38
0
from configparser import SafeConfigParser
from szlib.szcounts import ClusterCosmology

cosmologyName = 'LACosmology'  # alternative: 'Planck15'
iniFile = "input/params.ini"
Config = SafeConfigParser()
Config.optionxform = str
Config.read(iniFile)

lmax = Config.getint('general', 'camb_ellmax')
cosmoDict = dictFromSection(Config, cosmologyName)
constDict = dictFromSection(Config, 'constants')
cc = ClusterCosmology(cosmoDict, constDict, lmax)
theory = cc.theory

expX = Config.get('general', 'X')
expY = Config.get('general', 'Y')

beamX,beamFileX,fgFileX,noiseTX,noisePX,tellminX,tellmaxX,pellminX, \
    pellmaxX,lxcutTX,lycutTX,lxcutPX,lycutPX,lkneeTX,alphaTX,lkneePX,alphaPX = getLensParams(Config,expX)
beamY,beamFileY,fgFileY,noiseTY,noisePY,tellminY,tellmaxY,pellminY, \
    pellmaxY,lxcutTY,lycutTY,lxcutPY,lycutPY,lkneeTY,alphaTY,lkneePY,alphaPY = getLensParams(Config,expY)

TCMB = Config.getfloat('general', 'TCMB')
gradCut = Config.getint('general', 'gradCut')
halo = Config.getboolean('general', 'halo')
fsky = Config.getfloat('general', 'sqDeg') / 41250.
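
# The reads above assume an input/params.ini along these lines (key names taken
# from the Config.get* calls in this snippet; the values are hypothetical):
#
#   [general]
#   camb_ellmax = 8000
#   X = Experiment1
#   Y = Experiment2
#   TCMB = 2.7255
#   gradCut = 2000
#   halo = True
#   sqDeg = 4000.
#
# plus [LACosmology] and [constants] sections consumed by dictFromSection.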

kmin = 40

deg = 10.
Exemple #39
0
import os
import sys
from datetime import datetime

# Standard SUMO bootstrap: make the traci tools importable, or abort.
if 'SUMO_HOME' in os.environ:
    sys.path.append(os.path.join(os.environ['SUMO_HOME'], 'tools'))
else:
    sys.exit("Please declare the environment variable 'SUMO_HOME'")

import traci
from sumo_env import SumoEnvironment
from agent import Agent
from epsilon_greedy import EpsilonGreedy
from configparser import SafeConfigParser
from util import save_csv, plot, save_csv_1

if __name__ == '__main__':

    # load default config
    rl_params = SafeConfigParser()
    rl_params.read('rl.ini')
    simulation_step = int(rl_params.get('DEFAULT', 'num_simulations'))

    # define output csv file
    experiment_time = str(datetime.now()).split('.')[0]
    out_csv = 'outputs/{}'.format(experiment_time)
    result = 'outputs/result'
    # init sumo environment

    signal_type = rl_params.get('DEFAULT', 'signal')
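
    # Illustrative rl.ini matching the reads above (hypothetical values):
    #
    #   [DEFAULT]
    #   num_simulations = 100
    #   signal = one_way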

    if signal_type == 'one_way':
        signal_phase = [traci.trafficlight.Phase(42, "GGrr"),  # north-south
                  traci.trafficlight.Phase(2, "yyrr"),
                  traci.trafficlight.Phase(42, "rrGG"),  # west-east
                  traci.trafficlight.Phase(2, "rryy")
                 ]
Exemple #40
0
class ConfigEd(object):
    """ConfigEd(filename) creates a full powered config editor for wesen"""
    def __init__(self, filename):
        self.configfile = filename
        self.configParser = SafeConfigParser()
        self.alwaysDefaults = False

    def printConfig(self):
        """prints the configfile to screen"""
        print(("%s:" % self.configfile))
        for line in open(self.configfile).readlines():
            print(line[:-1])
            # -1 for \n removal
        print(".")

    def getConfig(self):
        """getConfig() returns the config dict.

        if a section, option or value is not found,
        the default values will be returned.
        """
        if (os.path.exists(self.configfile)):
            self.configParser.read(self.configfile)
            result = {}
            for entry in CONFIG_OPTIONS:
                (section, options) = entry
                result[section] = {}
                if (self.configParser.has_section(section)):
                    for option in options:
                        (key, entryType) = option
                        result[section][key] = \
                            self.getEntryFromConfigParser(
                                section, key, entryType)
            return result
        else:
            self.writeDefaults()
            return self.getConfig()

    def getEntryFromConfigParser(self, section, key, entryType):
        """depending on entryType,
        calls the appropriate getter from self.configParser"""
        value = None
        if (entryType == str):
            value = self.configParser.get(section, key)
        elif (entryType == int):
            value = self.configParser.getint(section, key)
        elif (entryType == bool):
            value = self.configParser.getboolean(section, key)
        elif (entryType == float):
            value = self.configParser.getfloat(section, key)
        if (value is None):
            value = CONFIG_DEFAULTS[section][key]
        return value
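
    # Shape of CONFIG_OPTIONS assumed by getConfig() and edit() below
    # (sections, keys and types here are purely illustrative):
    #
    #   CONFIG_OPTIONS = [
    #       ("general", [("debug", bool), ("rounds", int)]),
    #       ("world", [("name", str), ("size", float)]),
    #   ]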

    def writeDefaults(self):
        """write config defaults to file."""
        self.alwaysDefaults = True
        self.edit()

    def edit(self):
        """Interactive config-file editing;

        It will ask the user every single option possible,
        always showing the default values and sometimes a comment,
        making it easier to understand the configfile for newbies.
        """
        if (self.alwaysDefaults):
            write = True
        elif (os.path.exists(self.configfile)):
            write = (input(STRING_ERROR_FILEEXISTS % self.configfile) == "y")
        else:
            write = True
        if (write):
            self.configParser.read(self.configfile)
            for entry in CONFIG_OPTIONS:
                (section, options) = entry
                if (not self.configParser.has_section(section)):
                    self.configParser.add_section(section)
                if (not self.alwaysDefaults):
                    print("[%s]" % (section))
                for option in options:
                    key = option[0]
                    self.setDefInputStandard(section, key)
            self.configParser.write(open(self.configfile, "w"))
            print((STRING_MESSAGE_WROTE % self.configfile))
        else:
            print((STRING_ERROR_NOTWROTE % self.configfile))

    def setDefInputStandard(self, section, key):
        """fetches explanation from .strings
        and default value from .defaults"""
        # TODO why upper? we should have lower-case here.
        explanationString = STRING_CONFIGED[section.upper()][key.upper()]
        self.configParser.set(
            section, key,
            str(
                self.def_input(CONFIG_DEFAULTS[section][key],
                               explanationString)))

    def def_input(self, default, msg):
        """derived from raw_input,
        def_input(default,prompt) returns a user input or,
        if blank, the specified default.
        """
        if (not self.alwaysDefaults):
            try:
                result = input("# default: %s\t%s" % (default, msg))
            except EOFError:
                self.alwaysDefaults = True
                return default
            print("")
            if (not result):
                return default
            else:
                return result
        else:
            return default
Exemple #41
0
    app.addButton("Directory", app_add_dir, 7, 4, 1)

    app.setSticky("new")
    app.addLabel("l5", "Directories:", 8, 0)
    app.addListBox("Directories", row=9, rowspan=6)

    app.setEntryDefault("sys_name", "System Name")
    app.setEntryDefault("db_name", "Database Name")
    app.setEntryDefault("db_schema", "Database Schema")
    app.setEntryDefault("dir_path", "-- enter a directory --")

    # WAIT: Split this out into its own function:
    if os.path.exists(conf_file):
        config.remove_section("DOCUMENTS")
        if config.has_option('SYSTEM', 'sys_name'):
            conf_sys_name = config.get('SYSTEM', 'sys_name')
            if not conf_sys_name == '':
                app.setEntry("sys_name", conf_sys_name)
        if config.has_option('DATABASE', 'db_name'):
            conf_db_name = config.get('DATABASE', 'db_name')
            if not conf_db_name == '':
                app.setEntry("db_name", conf_db_name)
        if config.has_option('DATABASE', 'db_schema'):
            conf_db_schema = config.get('DATABASE', 'db_schema')
            if not conf_db_schema == '':
                app.setEntry("db_schema", conf_db_schema)

    app.addButtons(
        ["Submit", "Clear ", " Quit "], [submit, clear, quit], row=16)
    app.go()
Exemple #42
0
        print("Overriding alpha with ", alphaTOverride)

    #doRayDeriv = not(args.skipRay)
    doLens = not (args.skipLens)
    doSZ = not (args.skipSZ)

    if doLens:
        assert lensName != "", "ERROR: You didn't specify a lensName. If you don't want to do lensing, add --skip-lensing."

    assert doLens or doSZ, "ERROR: Nothing to do."

    iniFile = "input/pipeline.ini"
    Config = SafeConfigParser()
    Config.optionxform = str
    Config.read(iniFile)
    version = Config.get('general', 'version')
    pzcut = Config.getfloat('general', 'photoZCutOff')

    fparams = {}
    for (key, val) in Config.items('params'):
        if ',' in val:
            param, step = val.split(',')
            if key == 'sigR':
                rayFid = float(param)
                rayStep = float(step)
            fparams[key] = float(param)
        else:
            fparams[key] = float(val)

    from orphics.io import dict_from_section, list_from_config
Exemple #43
0
class Buildozer:

    ERROR = 0
    INFO = 1
    DEBUG = 2

    standard_cmds = ('distclean', 'update', 'debug', 'release',
                     'deploy', 'run', 'serve')

    def __init__(self, filename='buildozer.spec', target=None):
        self.log_level = 2
        self.environ = {}
        self.specfilename = filename
        self.state = None
        self.build_id = None
        self.config_profile = ''
        self.config = SafeConfigParser(allow_no_value=True)
        self.config.optionxform = lambda value: value
        self.config.getlist = self._get_config_list
        self.config.getlistvalues = self._get_config_list_values
        self.config.getdefault = self._get_config_default
        self.config.getbooldefault = self._get_config_bool
        self.config.getrawdefault = self._get_config_raw_default

        if exists(filename):
            self.config.read(filename, "utf-8")
            self.check_configuration_tokens()

        # Check all section/tokens for env vars, and replace the
        # config value if a suitable env var exists.
        set_config_from_envs(self.config)

        try:
            self.log_level = int(self.config.getdefault(
                'buildozer', 'log_level', '2'))
        except Exception:
            pass

        self.user_bin_dir = self.config.getdefault('buildozer', 'bin_dir', None)
        if self.user_bin_dir:
            self.user_bin_dir = realpath(join(self.root_dir, self.user_bin_dir))

        self.targetname = None
        self.target = None
        if target:
            self.set_target(target)

    def set_target(self, target):
        '''Set the target to use (one of buildozer.targets, such as "android")
        '''
        self.targetname = target
        m = __import__('buildozer.targets.{0}'.format(target),
                       fromlist=['buildozer'])
        self.target = m.get_target(self)
        self.check_build_layout()
        self.check_configuration_tokens()

    def prepare_for_build(self):
        '''Prepare the build.
        '''
        assert(self.target is not None)
        if hasattr(self.target, '_build_prepared'):
            return

        self.info('Preparing build')

        self.info('Check requirements for {0}'.format(self.targetname))
        self.target.check_requirements()

        self.info('Install platform')
        self.target.install_platform()

        self.info('Check application requirements')
        self.check_application_requirements()

        self.info('Check garden requirements')
        self.check_garden_requirements()

        self.info('Compile platform')
        self.target.compile_platform()

        # flag to prevent multiple build
        self.target._build_prepared = True

    def build(self):
        '''Do the build.

        The target can set build_mode to 'release' or 'debug' before calling
        this method.

        (:meth:`prepare_for_build` must have been call before.)
        '''
        assert(self.target is not None)
        assert(hasattr(self.target, '_build_prepared'))

        if hasattr(self.target, '_build_done'):
            return

        # increment the build number
        self.build_id = int(self.state.get('cache.build_id', '0')) + 1
        self.state['cache.build_id'] = str(self.build_id)

        self.info('Build the application #{}'.format(self.build_id))
        self.build_application()

        self.info('Package the application')
        self.target.build_package()

        # flag to prevent multiple build
        self.target._build_done = True

    #
    # Log functions
    #

    def log(self, level, msg):
        if level > self.log_level:
            return
        if USE_COLOR:
            color = COLOR_SEQ(LOG_LEVELS_C[level])
            print(''.join((RESET_SEQ, color, '# ', msg, RESET_SEQ)))
        else:
            print('{} {}'.format(LOG_LEVELS_T[level], msg))

    def debug(self, msg):
        self.log(self.DEBUG, msg)

    def log_env(self, level, env):
        """dump env into debug logger in readable format"""
        self.log(level, "ENVIRONMENT:")
        for k, v in env.items():
            self.log(level, "    {} = {}".format(k, pformat(v)))

    def info(self, msg):
        self.log(self.INFO, msg)

    def error(self, msg):
        self.log(self.ERROR, msg)

    #
    # Internal check methods
    #

    def checkbin(self, msg, fn):
        self.debug('Search for {0}'.format(msg))
        if exists(fn):
            return realpath(fn)
        for dn in environ['PATH'].split(':'):
            rfn = realpath(join(dn, fn))
            if exists(rfn):
                self.debug(' -> found at {0}'.format(rfn))
                return rfn
        self.error('{} not found, please install it.'.format(msg))
        exit(1)

    def cmd(self, command, **kwargs):
        # prepare the environ, based on the system + our own env
        env = copy(environ)
        env.update(self.environ)

        # prepare the process
        kwargs.setdefault('env', env)
        kwargs.setdefault('stdout', PIPE)
        kwargs.setdefault('stderr', PIPE)
        kwargs.setdefault('close_fds', True)
        kwargs.setdefault('shell', True)
        kwargs.setdefault('show_output', self.log_level > 1)

        show_output = kwargs.pop('show_output')
        get_stdout = kwargs.pop('get_stdout', False)
        get_stderr = kwargs.pop('get_stderr', False)
        break_on_error = kwargs.pop('break_on_error', True)
        sensible = kwargs.pop('sensible', False)

        if not sensible:
            self.debug('Run {0!r}'.format(command))
        else:
            if type(command) in (list, tuple):
                self.debug('Run {0!r} ...'.format(command[0]))
            else:
                self.debug('Run {0!r} ...'.format(command.split()[0]))
        self.debug('Cwd {}'.format(kwargs.get('cwd')))

        # open the process
        if sys.platform == 'win32':
            kwargs.pop('close_fds', None)
        process = Popen(command, **kwargs)

        # prepare fds
        fd_stdout = process.stdout.fileno()
        fd_stderr = process.stderr.fileno()
        if fcntl:
            fcntl.fcntl(
                fd_stdout, fcntl.F_SETFL,
                fcntl.fcntl(fd_stdout, fcntl.F_GETFL) | os.O_NONBLOCK)
            fcntl.fcntl(
                fd_stderr, fcntl.F_SETFL,
                fcntl.fcntl(fd_stderr, fcntl.F_GETFL) | os.O_NONBLOCK)

        ret_stdout = [] if get_stdout else None
        ret_stderr = [] if get_stderr else None
        while True:
            try:
                readx = select.select([fd_stdout, fd_stderr], [], [])[0]
            except select.error:
                break
            if fd_stdout in readx:
                chunk = process.stdout.read()
                if not chunk:
                    break
                if get_stdout:
                    ret_stdout.append(chunk)
                if show_output:
                    stdout.write(chunk.decode('utf-8', 'replace'))
            if fd_stderr in readx:
                chunk = process.stderr.read()
                if not chunk:
                    break
                if get_stderr:
                    ret_stderr.append(chunk)
                if show_output:
                    stderr.write(chunk.decode('utf-8', 'replace'))

            stdout.flush()
            stderr.flush()

        process.communicate()
        if process.returncode != 0 and break_on_error:
            self.error('Command failed: {0}'.format(command))
            self.log_env(self.ERROR, kwargs['env'])
            self.error('')
            self.error('Buildozer failed to execute the last command')
            if self.log_level <= self.INFO:
                self.error('If the error is not obvious, please raise the log_level to 2')
                self.error('and retry the latest command.')
            else:
                self.error('The error might be hidden in the log above this error')
                self.error('Please read the full log, and search for it before')
                self.error('raising an issue with buildozer itself.')
            self.error('In case of a bug report, please add a full log with log_level = 2')
            raise BuildozerCommandException()
        if ret_stdout:
            ret_stdout = b''.join(ret_stdout)
        if ret_stderr:
            ret_stderr = b''.join(ret_stderr)
        return (ret_stdout.decode('utf-8', 'ignore') if ret_stdout else None,
                ret_stderr.decode('utf-8') if ret_stderr else None,
                process.returncode)
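
    # Typical (hypothetical) call: capture stdout and keep going on failure:
    #
    #   out, err, rc = self.cmd('python3 --version', get_stdout=True,
    #                           break_on_error=False)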

    def cmd_expect(self, command, **kwargs):
        from pexpect import spawnu

        # prepare the environ, based on the system + our own env
        env = copy(environ)
        env.update(self.environ)

        # prepare the process
        kwargs.setdefault('env', env)
        kwargs.setdefault('show_output', self.log_level > 1)
        sensible = kwargs.pop('sensible', False)
        show_output = kwargs.pop('show_output')

        if show_output:
            kwargs['logfile'] = codecs.getwriter('utf8')(stdout.buffer)

        if not sensible:
            self.debug('Run (expect) {0!r}'.format(command))
        else:
            self.debug('Run (expect) {0!r} ...'.format(command.split()[0]))

        self.debug('Cwd {}'.format(kwargs.get('cwd')))
        return spawnu(command, **kwargs)

    def check_configuration_tokens(self):
        '''Ensure the spec file is 'correct'.
        '''
        self.info('Check configuration tokens')
        self.migrate_configuration_tokens()
        get = self.config.getdefault
        errors = []
        adderror = errors.append
        if not get('app', 'title', ''):
            adderror('[app] "title" is missing')
        if not get('app', 'source.dir', ''):
            adderror('[app] "source.dir" is missing')

        package_name = get('app', 'package.name', '')
        if not package_name:
            adderror('[app] "package.name" is missing')
        elif package_name[0] in map(str, range(10)):
            adderror('[app] "package.name" may not start with a number.')

        version = get('app', 'version', '')
        version_regex = get('app', 'version.regex', '')
        if not version and not version_regex:
            adderror('[app] One of "version" or "version.regex" must be set')
        if version and version_regex:
            adderror('[app] Conflict between "version" and "version.regex"'
                     ', only one can be used.')
        if version_regex and not get('app', 'version.filename', ''):
            adderror('[app] "version.filename" is missing'
                     ', required by "version.regex"')

        orientation = get('app', 'orientation', 'landscape')
        if orientation not in ('landscape', 'portrait', 'all', 'sensorLandscape'):
            adderror('[app] "orientation" have an invalid value')

        if errors:
            self.error('{0} error(s) found in the buildozer.spec'.format(
                len(errors)))
            for error in errors:
                print(error)
            exit(1)
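
    # A minimal (hypothetical) [app] section that satisfies the checks above:
    #
    #   [app]
    #   title = My Application
    #   package.name = myapp
    #   package.domain = org.example
    #   source.dir = .
    #   version = 0.1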

    def migrate_configuration_tokens(self):
        config = self.config
        if config.has_section("app"):
            migration = (
                ("android.p4a_dir", "p4a.source_dir"),
                ("android.p4a_whitelist", "android.whitelist"),
                ("android.bootstrap", "p4a.bootstrap"),
                ("android.branch", "p4a.branch"),
                ("android.p4a_whitelist_src", "android.whitelist_src"),
                ("android.p4a_blacklist_src", "android.blacklist_src")
            )
            for entry_old, entry_new in migration:
                if not config.has_option("app", entry_old):
                    continue
                value = config.get("app", entry_old)
                config.set("app", entry_new, value)
                config.remove_option("app", entry_old)
                self.error("In section [app]: {} is deprecated, rename to {}!".format(
                    entry_old, entry_new))

    def check_build_layout(self):
        '''Ensure the build (local and global) directory layout and files are
        ready.
        '''
        self.info('Ensure build layout')

        if not exists(self.specfilename):
            print('No {0} found in the current directory. Abandon.'.format(
                self.specfilename))
            exit(1)

        # create global dir
        self.mkdir(self.global_buildozer_dir)
        self.mkdir(self.global_cache_dir)

        # create local .buildozer/ dir
        self.mkdir(self.buildozer_dir)
        # create local bin/ dir
        self.mkdir(self.bin_dir)

        self.mkdir(self.applibs_dir)
        self.state = JsonStore(join(self.buildozer_dir, 'state.db'))

        target = self.targetname
        if target:
            self.mkdir(join(self.global_platform_dir, target, 'platform'))
            self.mkdir(join(self.buildozer_dir, target, 'platform'))
            self.mkdir(join(self.buildozer_dir, target, 'app'))

    def check_application_requirements(self):
        '''Ensure the application requirements are all available and ready to be
        packaged as well.
        '''
        requirements = self.config.getlist('app', 'requirements', '')
        target_available_packages = self.target.get_available_packages()
        if target_available_packages is True:
            # target handles all packages!
            return

        # remove all the requirements that the target can compile
        onlyname = lambda x: x.split('==')[0]  # noqa: E731
        requirements = [x for x in requirements if onlyname(x) not in
                        target_available_packages]

        if requirements and hasattr(sys, 'real_prefix'):
            e = self.error
            e('virtualenv is needed to install pure-Python modules, but')
            e('virtualenv does not support nesting, and you are running')
            e('buildozer in one. Please run buildozer outside of a')
            e('virtualenv instead.')
            exit(1)

        # have we already installed the libs?
        if (
            exists(self.applibs_dir) and
            self.state.get('cache.applibs', '') == requirements
        ):
            self.debug('Application requirements already installed, pass')
            return

        # recreate applibs
        self.rmdir(self.applibs_dir)
        self.mkdir(self.applibs_dir)

        # ok now check the availability of all requirements
        for requirement in requirements:
            self._install_application_requirement(requirement)

        # everything goes as expected, save this state!
        self.state['cache.applibs'] = requirements

    def _install_application_requirement(self, module):
        self._ensure_virtualenv()
        self.debug('Install requirement {} in virtualenv'.format(module))
        self.cmd('pip install --target={} {}'.format(self.applibs_dir, module),
                 env=self.env_venv,
                 cwd=self.buildozer_dir)

    def check_garden_requirements(self):
        '''Ensure required garden packages are available to be included.
        '''
        garden_requirements = self.config.getlist('app',
                'garden_requirements', '')

        # have we installed the garden packages?
        if exists(self.gardenlibs_dir) and \
                self.state.get('cache.gardenlibs', '') == garden_requirements:
            self.debug('Garden requirements already installed, pass')
            return

        # we're going to reinstall all the garden libs.
        self.rmdir(self.gardenlibs_dir)

        # but if we don't have requirements, or if the user removed everything,
        # don't do anything.
        if not garden_requirements:
            self.state['cache.gardenlibs'] = garden_requirements
            return

        self._ensure_virtualenv()
        self.cmd('pip install Kivy-Garden==0.1.1', env=self.env_venv)

        # recreate gardenlibs
        self.mkdir(self.gardenlibs_dir)

        for requirement in garden_requirements:
            self._install_garden_package(requirement)

        # save gardenlibs state
        self.state['cache.gardenlibs'] = garden_requirements

    def _install_garden_package(self, package):
        self._ensure_virtualenv()
        self.debug('Install garden package {} in buildozer_dir'.format(package))
        self.cmd('garden install --app {}'.format(package),
                env=self.env_venv,
                cwd=self.buildozer_dir)

    def _ensure_virtualenv(self):
        if hasattr(self, 'venv'):
            return
        self.venv = join(self.buildozer_dir, 'venv')
        if not self.file_exists(self.venv):
            self.cmd('python3 -m venv ./venv',
                    cwd=self.buildozer_dir)

        # read virtualenv output and parse it
        output = self.cmd('bash -c "source venv/bin/activate && env"',
                get_stdout=True,
                cwd=self.buildozer_dir)
        self.env_venv = copy(self.environ)
        for line in output[0].splitlines():
            args = line.split('=', 1)
            if len(args) != 2:
                continue
            key, value = args
            if key in ('VIRTUAL_ENV', 'PATH'):
                self.env_venv[key] = value
        if 'PYTHONHOME' in self.env_venv:
            del self.env_venv['PYTHONHOME']

        # ensure any sort of compilation will fail
        self.env_venv['CC'] = '/bin/false'
        self.env_venv['CXX'] = '/bin/false'

    def mkdir(self, dn):
        if exists(dn):
            return
        self.debug('Create directory {0}'.format(dn))
        makedirs(dn)

    def rmdir(self, dn):
        if not exists(dn):
            return
        self.debug('Remove directory and subdirectory {}'.format(dn))
        rmtree(dn)

    def file_matches(self, patterns):
        from glob import glob
        result = []
        for pattern in patterns:
            matches = glob(expanduser(pattern.strip()))
            result.extend(matches)
        return result

    def file_exists(self, *args):
        return exists(join(*args))

    def file_rename(self, source, target, cwd=None):
        if cwd:
            source = join(cwd, source)
            target = join(cwd, target)
        self.debug('Rename {0} to {1}'.format(source, target))
        if not os.path.isdir(os.path.dirname(target)):
            self.error(('Rename {0} to {1} fails because {2} is not a '
                        'directory').format(source, target, target))
        move(source, target)

    def file_copy(self, source, target, cwd=None):
        if cwd:
            source = join(cwd, source)
            target = join(cwd, target)
        self.debug('Copy {0} to {1}'.format(source, target))
        copyfile(source, target)

    def file_extract(self, archive, cwd=None):
        if archive.endswith('.tgz') or archive.endswith('.tar.gz'):
            # XXX tarfile doesn't work for NDK-r8c :(
            #tf = tarfile.open(archive, 'r:*')
            #tf.extractall(path=cwd)
            #tf.close()
            self.cmd('tar xzf {0}'.format(archive), cwd=cwd)
            return

        if archive.endswith('.tbz2') or archive.endswith('.tar.bz2'):
            # XXX same as before
            self.cmd('tar xjf {0}'.format(archive), cwd=cwd)
            return

        if archive.endswith('.bin'):
            # To process the bin files for linux and darwin systems
            self.cmd('chmod a+x {0}'.format(archive), cwd=cwd)
            self.cmd('./{0}'.format(archive), cwd=cwd)
            return

        if archive.endswith('.zip'):
            self.cmd('unzip -q {}'.format(join(cwd, archive)), cwd=cwd)
            return

        raise Exception('Unhandled extraction for type {0}'.format(archive))

    def file_copytree(self, src, dest):
        print('copy {} to {}'.format(src, dest))
        if os.path.isdir(src):
            if not os.path.isdir(dest):
                os.makedirs(dest)
            files = os.listdir(src)
            for f in files:
                self.file_copytree(
                    os.path.join(src, f),
                    os.path.join(dest, f))
        else:
            copyfile(src, dest)

    def clean_platform(self):
        self.info('Clean the platform build directory')
        if not exists(self.platform_dir):
            return
        rmtree(self.platform_dir)

    def download(self, url, filename, cwd=None):
        def report_hook(index, blksize, size):
            if size <= 0:
                progression = '{0} bytes'.format(index * blksize)
            else:
                progression = '{0:.2f}%'.format(
                        index * blksize * 100. / float(size))
            if "CI" not in environ:
                stdout.write('- Download {}\r'.format(progression))
                stdout.flush()

        url = url + filename
        if cwd:
            filename = join(cwd, filename)
        if self.file_exists(filename):
            unlink(filename)

        self.debug('Downloading {0}'.format(url))
        urlretrieve(url, filename, report_hook)
        return filename

    def get_version(self):
        c = self.config
        has_version = c.has_option('app', 'version')
        has_regex = c.has_option('app', 'version.regex')
        has_filename = c.has_option('app', 'version.filename')

        # version number specified
        if has_version:
            if has_regex or has_filename:
                raise Exception(
                    'version.regex and version.filename conflict with version')
            return c.get('app', 'version')

        # search by regex
        if has_regex or has_filename:
            if has_regex and not has_filename:
                raise Exception('version.filename is missing')
            if has_filename and not has_regex:
                raise Exception('version.regex is missing')

            fn = c.get('app', 'version.filename')
            with open(fn) as fd:
                data = fd.read()
                regex = c.get('app', 'version.regex')
                match = search(regex, data)
                if not match:
                    raise Exception(
                        'Unable to find capture version in {0}\n'
                        ' (looking for `{1}`)'.format(fn, regex))
                version = match.groups()[0]
                self.debug('Captured version: {0}'.format(version))
                return version

        raise Exception('Missing version or version.regex + version.filename')
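
    # Illustrative [app] fragments handled by get_version() (values and the
    # file path are hypothetical):
    #
    #   version = 1.2.0
    #
    # or, to capture the version from a source file:
    #
    #   version.regex = __version__ = ['"](.*)['"]
    #   version.filename = %(source.dir)s/main.py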

    def build_application(self):
        self._copy_application_sources()
        self._copy_application_libs()
        self._copy_garden_libs()
        self._add_sitecustomize()

    def _copy_application_sources(self):
        # XXX clean the inclusion/exclusion algo.
        source_dir = realpath(self.config.getdefault('app', 'source.dir', '.'))
        include_exts = self.config.getlist('app', 'source.include_exts', '')
        exclude_exts = self.config.getlist('app', 'source.exclude_exts', '')
        exclude_dirs = self.config.getlist('app', 'source.exclude_dirs', '')
        exclude_patterns = self.config.getlist('app', 'source.exclude_patterns', '')
        include_patterns = self.config.getlist('app',
                                               'source.include_patterns',
                                               '')
        app_dir = self.app_dir

        self.debug('Copy application source from {}'.format(source_dir))

        rmtree(self.app_dir)

        for root, dirs, files in walk(source_dir, followlinks=True):
            # avoid hidden directory
            if any(x.startswith('.') for x in root.split(sep)):
                continue

            # need to have sort-of normalization. Let's say you want to exclude
            # image directory but not images, the filtered_root must have a / at
            # the end, same for the exclude_dir. And then we can safely compare
            filtered_root = root[len(source_dir) + 1:].lower()
            if filtered_root:
                filtered_root += '/'

                # manual exclude_dirs approach
                is_excluded = False
                for exclude_dir in exclude_dirs:
                    if exclude_dir[-1] != '/':
                        exclude_dir += '/'
                    if filtered_root.startswith(exclude_dir.lower()):
                        is_excluded = True
                        break

                # pattern matching
                if not is_excluded:
                    # match pattern if not ruled out by exclude_dirs
                    for pattern in exclude_patterns:
                        if fnmatch(filtered_root, pattern):
                            is_excluded = True
                            break
                for pattern in include_patterns:
                    if fnmatch(filtered_root, pattern):
                        is_excluded = False
                        break

                if is_excluded:
                    continue

            for fn in files:
                # avoid hidden files
                if fn.startswith('.'):
                    continue

                # pattern matching
                is_excluded = False
                dfn = fn.lower()
                if filtered_root:
                    dfn = join(filtered_root, fn)
                for pattern in exclude_patterns:
                    if fnmatch(dfn, pattern):
                        is_excluded = True
                        break
                for pattern in include_patterns:
                    if fnmatch(dfn, pattern):
                        is_excluded = False
                        break
                if is_excluded:
                    continue

                # filter based on the extension
                # TODO more filters
                basename, ext = splitext(fn)
                if ext:
                    ext = ext[1:]
                    if include_exts and ext not in include_exts:
                        continue
                    if exclude_exts and ext in exclude_exts:
                        continue

                sfn = join(root, fn)
                rfn = realpath(join(app_dir, root[len(source_dir) + 1:], fn))

                # ensure the directory exists
                dfn = dirname(rfn)
                self.mkdir(dfn)

                # copy!
                self.debug('Copy {0}'.format(sfn))
                copyfile(sfn, rfn)

    def _copy_application_libs(self):
        # copy also the libs
        copytree(self.applibs_dir, join(self.app_dir, '_applibs'))

    def _copy_garden_libs(self):
        if exists(self.gardenlibs_dir):
            copytree(self.gardenlibs_dir, join(self.app_dir, 'libs'))

    def _add_sitecustomize(self):
        copyfile(join(dirname(__file__), 'sitecustomize.py'),
                join(self.app_dir, 'sitecustomize.py'))

        main_py = join(self.app_dir, 'service', 'main.py')
        if not self.file_exists(main_py):
            #self.error('Unable to patch main_py to add applibs directory.')
            return

        header = (b'import sys, os; '
                   b'sys.path = [os.path.join(os.getcwd(),'
                   b'"..", "_applibs")] + sys.path\n')
        with open(main_py, 'rb') as fd:
            data = fd.read()
        data = header + data
        with open(main_py, 'wb') as fd:
            fd.write(data)
        self.info('Patched service/main.py to include applibs')

    def namify(self, name):
        '''Return a "valid" name from a name with lot of invalid chars
        (allowed characters: a-z, A-Z, 0-9, -, _)
        '''
        return re.sub(r'[^a-zA-Z0-9_\-]', '_', name)

    @property
    def root_dir(self):
        return realpath(dirname(self.specfilename))

    @property
    def user_build_dir(self):
        """The user-provided build dir, if any."""
        # Check for a user-provided build dir
        # Check the (deprecated) builddir token, for backwards compatibility
        build_dir = self.config.getdefault('buildozer', 'builddir', None)
        if build_dir is not None:
            # for backwards compatibility, append .buildozer to builddir
            build_dir = join(build_dir, '.buildozer')
        build_dir = self.config.getdefault('buildozer', 'build_dir', build_dir)

        if build_dir is not None:
            build_dir = realpath(join(self.root_dir, build_dir))

        return build_dir

    @property
    def buildozer_dir(self):
        '''The directory in which to run the app build.'''
        if self.user_build_dir is not None:
            return self.user_build_dir
        return join(self.root_dir, '.buildozer')

    @property
    def bin_dir(self):
        if self.user_bin_dir:
            return self.user_bin_dir
        return join(self.root_dir, 'bin')

    @property
    def platform_dir(self):
        return join(self.buildozer_dir, self.targetname, 'platform')

    @property
    def app_dir(self):
        return join(self.buildozer_dir, self.targetname, 'app')

    @property
    def applibs_dir(self):
        return join(self.buildozer_dir, 'applibs')

    @property
    def gardenlibs_dir(self):
        return join(self.buildozer_dir, 'libs')

    @property
    def global_buildozer_dir(self):
        return join(expanduser('~'), '.buildozer')

    @property
    def global_platform_dir(self):
        return join(self.global_buildozer_dir, self.targetname, 'platform')

    @property
    def global_packages_dir(self):
        return join(self.global_buildozer_dir, self.targetname, 'packages')

    @property
    def global_cache_dir(self):
        return join(self.global_buildozer_dir, 'cache')

    @property
    def package_full_name(self):
        package_name = self.config.getdefault('app', 'package.name', '')
        package_domain = self.config.getdefault('app', 'package.domain', '')
        if package_domain == '':
            return package_name
        return '{}.{}'.format(package_domain, package_name)

    #
    # command line invocation
    #

    def targets(self):
        for fn in listdir(join(dirname(__file__), 'targets')):
            if fn.startswith('.') or fn.startswith('__'):
                continue
            if not fn.endswith('.py'):
                continue
            target = fn[:-3]
            try:
                m = __import__('buildozer.targets.{0}'.format(target),
                        fromlist=['buildozer'])
                yield target, m
            except NotImplementedError:
                pass
            except Exception:
                raise

    def usage(self):
        print('Usage:')
        print('    buildozer [--profile <name>] [--verbose] [target] <command>...')
        print('    buildozer --version')
        print('')
        print('Available targets:')
        targets = list(self.targets())
        for target, m in targets:
            try:
                doc = m.__doc__.strip().splitlines()[0].strip()
            except Exception:
                doc = '<no description>'
            print('  {0:<18} {1}'.format(target, doc))

        print('')
        print('Global commands (without target):')
        cmds = [x for x in dir(self) if x.startswith('cmd_')]
        for cmd in cmds:
            name = cmd[4:]
            meth = getattr(self, cmd)

            if not meth.__doc__:
                continue
            doc = [x for x in
                    meth.__doc__.strip().splitlines()][0].strip()
            print('  {0:<18} {1}'.format(name, doc))

        print('')
        print('Target commands:')
        print('  clean      Clean the target environment')
        print('  update     Update the target dependencies')
        print('  debug      Build the application in debug mode')
        print('  release    Build the application in release mode')
        print('  deploy     Deploy the application on the device')
        print('  run        Run the application on the device')
        print('  serve      Serve the bin directory via SimpleHTTPServer')

        for target, m in targets:
            mt = m.get_target(self)
            commands = mt.get_custom_commands()
            if not commands:
                continue
            print('')
            print('Target "{0}" commands:'.format(target))
            for command, doc in commands:
                if not doc:
                    continue
                doc = textwrap.fill(textwrap.dedent(doc).strip(), 59,
                                    subsequent_indent=' ' * 21)
                print('  {0:<18} {1}'.format(command, doc))

        print('')

    def run_default(self):
        self.check_build_layout()
        if 'buildozer:defaultcommand' not in self.state:
            print('No default command set.')
            print('Use "buildozer setdefault <command args...>"')
            print('Use "buildozer help" for a list of all commands"')
            exit(1)
        cmd = self.state['buildozer:defaultcommand']
        self.run_command(cmd)

    def run_command(self, args):
        while args:
            if not args[0].startswith('-'):
                break
            arg = args.pop(0)

            if arg in ('-v', '--verbose'):
                self.log_level = 2

            elif arg in ('-h', '--help'):
                self.usage()
                exit(0)

            elif arg in ('-p', '--profile'):
                self.config_profile = args.pop(0)

            elif arg == '--version':
                print('Buildozer {0}'.format(__version__))
                exit(0)

        self._merge_config_profile()

        self.check_root()

        if not args:
            self.run_default()
            return

        command, args = args[0], args[1:]
        cmd = 'cmd_{0}'.format(command)

        # internal commands ?
        if hasattr(self, cmd):
            getattr(self, cmd)(*args)
            return

        # maybe it's a target?
        targets = [x[0] for x in self.targets()]
        if command not in targets:
            print('Unknown command/target {}'.format(command))
            exit(1)

        self.set_target(command)
        self.target.run_commands(args)

    def check_root(self):
        '''If effective user id is 0, display a warning and require
        user input to continue (or to cancel)'''

        warn_on_root = self.config.getdefault('buildozer', 'warn_on_root', '1')
        try:
            euid = os.geteuid() == 0
        except AttributeError:
            # os.geteuid() does not exist on Windows; use the admin check instead
            if sys.platform == 'win32':
                import ctypes
                euid = ctypes.windll.shell32.IsUserAnAdmin() != 0
            else:
                euid = False
        if warn_on_root == '1' and euid:
            print('\033[91m\033[1mBuildozer is running as root!\033[0m')
            print('\033[91mThis is \033[1mnot\033[0m \033[91mrecommended, and may lead to problems later.\033[0m')
            cont = None
            while cont not in ('y', 'n'):
                cont = input('Are you sure you want to continue [y/n]? ')

            if cont == 'n':
                sys.exit()

    def cmd_init(self, *args):
        '''Create a initial buildozer.spec in the current directory
        '''
        if exists('buildozer.spec'):
            print('ERROR: You already have a buildozer.spec file.')
            exit(1)
        copyfile(join(dirname(__file__), 'default.spec'), 'buildozer.spec')
        print('File buildozer.spec created, ready to customize!')

    def cmd_distclean(self, *args):
        '''Clean the whole Buildozer environment.
        '''
        print("Warning: Your ndk, sdk and all other cached packages will be"
              " removed. Continue? (y/n)")
        if sys.stdin.readline().lower()[0] == 'y':
            self.info('Clean the global build directory')
            if not exists(self.global_buildozer_dir):
                return
            rmtree(self.global_buildozer_dir)

    def cmd_appclean(self, *args):
        '''Clean the .buildozer folder in the app directory.

        This command specifically refuses to delete files in a
        user-specified build directory, to avoid accidentally deleting
        more than the user intends.
        '''
        if self.user_build_dir is not None:
            self.error(
                ('Failed: build_dir is specified as {} in the buildozer config. `appclean` will '
                 'not attempt to delete files in a user-specified build directory.').format(self.user_build_dir))
        elif exists(self.buildozer_dir):
            self.info('Deleting {}'.format(self.buildozer_dir))
            rmtree(self.buildozer_dir)
        else:
            self.error('{} already deleted, skipping.'.format(self.buildozer_dir))

    def cmd_help(self, *args):
        '''Show the Buildozer help.
        '''
        self.usage()

    def cmd_setdefault(self, *args):
        '''Set the default command to run when no arguments are given
        '''
        self.check_build_layout()
        self.state['buildozer:defaultcommand'] = args

    def cmd_version(self, *args):
        '''Show the Buildozer version
        '''
        print('Buildozer {0}'.format(__version__))

    def cmd_serve(self, *args):
        '''Serve the bin directory via SimpleHTTPServer
        '''
        try:
            from http.server import SimpleHTTPRequestHandler
            from socketserver import TCPServer
        except ImportError:
            from SimpleHTTPServer import SimpleHTTPRequestHandler
            from SocketServer import TCPServer

        os.chdir(self.bin_dir)
        handler = SimpleHTTPRequestHandler
        httpd = TCPServer(("", SIMPLE_HTTP_SERVER_PORT), handler)
        print("Serving via HTTP at port {}".format(SIMPLE_HTTP_SERVER_PORT))
        print("Press Ctrl+c to quit serving.")
        httpd.serve_forever()

    #
    # Private
    #

    def _merge_config_profile(self):
        profile = self.config_profile
        if not profile:
            return
        for section in self.config.sections():

            # extract the profile part from the section name
            # example: [app@default,hd]
            parts = section.split('@', 1)
            if len(parts) < 2:
                continue

            # create a list that contain all the profiles of the current section
            # ['default', 'hd']
            section_base, section_profiles = parts
            section_profiles = section_profiles.split(',')
            if profile not in section_profiles:
                continue

            # the current profile is one available in the section
            # merge with the general section, or make it one.
            if not self.config.has_section(section_base):
                self.config.add_section(section_base)
            for name, value in self.config.items(section):
                print('merged ({}, {}) into {} (profile is {})'.format(name,
                        value, section_base, profile))
                self.config.set(section_base, name, value)
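
    # Illustrative profile usage (hypothetical spec fragment): running
    # "buildozer --profile hd <target> <command>" merges
    #
    #   [app@hd]
    #   title = My Application HD
    #
    # into the plain [app] section before anything else runs.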

    def _get_config_list_values(self, *args, **kwargs):
        kwargs['with_values'] = True
        return self._get_config_list(*args, **kwargs)

    def _get_config_list(self, section, token, default=None, with_values=False):
        # monkey-patch method for ConfigParser
        # get a key as a list of string, separated from the comma

        # check if an env var exists that should replace the file config
        set_config_token_from_env(section, token, self.config)

        # if a section:token is defined, let's use the content as a list.
        l_section = '{}:{}'.format(section, token)
        if self.config.has_section(l_section):
            values = self.config.options(l_section)
            if with_values:
                return ['{}={}'.format(key, self.config.get(l_section, key)) for
                        key in values]
            else:
                return [x.strip() for x in values]

        values = self.config.getdefault(section, token, '')
        if not values:
            return default
        values = values.split(',')
        if not values:
            return default
        return [x.strip() for x in values]
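
    # Illustrative (hypothetical) fragments parsed by getlist():
    #
    #   [app]
    #   source.include_exts = py,png,jpg,kv   ->  ['py', 'png', 'jpg', 'kv']
    #
    # or, using a dedicated section whose keys become the list:
    #
    #   [app:source.include_exts]
    #   py
    #   png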

    def _get_config_default(self, section, token, default=None):
        # monkey-patch method for ConfigParser
        # get an appropriate env var if it exists, else
        # get a key in a section, or the default

        # check if an env var exists that should replace the file config
        set_config_token_from_env(section, token, self.config)

        if not self.config.has_section(section):
            return default
        if not self.config.has_option(section, token):
            return default
        return self.config.get(section, token)

    def _get_config_bool(self, section, token, default=False):
        # monkey-patch method for ConfigParser
        # get a key in a section, or the default

        # check if an env var exists that should replace the file config
        set_config_token_from_env(section, token, self.config)

        if not self.config.has_section(section):
            return default
        if not self.config.has_option(section, token):
            return default
        return self.config.getboolean(section, token)

    def _get_config_raw_default(self, section, token, default=None, section_sep="=", split_char=" "):
        l_section = '{}:{}'.format(section, token)
        if self.config.has_section(l_section):
            return [section_sep.join(item) for item in self.config.items(l_section)]
        if not self.config.has_option(section, token):
            return default.split(split_char) if default is not None else default
        return self.config.get(section, token).split(split_char)


class FacebookScorer(EIScorer):

    def __init__(self, filterDict, confFile='/home/addi/egovbench/apps/pythons/egovbench_config.ini'):
        super(FacebookScorer, self).__init__(
            filterDict,
            FacebookMongoConnector(),
            'post_reactions.like',
            'post_commentCount',
            'post_shareCount',
            'page_id',
            'post_type'
        )

        self.filter_dict = filterDict
        self.fmc = FacebookMongoConnector()

        # Read the reaction sentiment scores from the egovbench_config.ini file
        self.confparser = SafeConfigParser()
        self.confparser.read(confFile)

    def getReactionScore(self, reaction):

        self.prompt('{} Calculating {} reaction score . . .'.format(json.dumps(self.filter_dict), reaction))

        ''' Scoring template for computing a single reaction's score '''

        reactionCount = self.getFieldSum('post_reactions.{}'.format(reaction))

        total_reactionCount = self.getFieldSum('post_reactions.like') \
                            + self.getFieldSum('post_reactions.angry') \
                            + self.getFieldSum('post_reactions.wow') \
                            + self.getFieldSum('post_reactions.sad') \
                            + self.getFieldSum('post_reactions.haha') \
                            + self.getFieldSum('post_reactions.love')

        sentiment_score = self.confparser.get('SentimentScoreConfig', reaction)

        try:
            reaction_score = float(sentiment_score) * (reactionCount / total_reactionCount)
        except ZeroDivisionError as er:
            logging.warning(er)
            reaction_score = None

        self.prompt('{} {} reaction score: {}'.format(json.dumps(self.filter_dict), reaction, reaction_score))

        return reaction_score
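
    # Worked example (hypothetical numbers): with a configured sentiment score
    # of 1.0 for 'like', 40 likes out of 100 total reactions give
    # 1.0 * (40 / 100) = 0.4 as the 'like' reaction score.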

    def getLikeScore(self):
        ''' Compute the 'like' reaction score '''
        return self.getReactionScore('like')

    def getAngryScore(self):
        ''' Compute the 'angry' reaction score '''
        return self.getReactionScore('angry')

    def getWowScore(self):
        ''' Compute the 'wow' reaction score '''
        return self.getReactionScore('wow')

    def getSadScore(self):
        ''' Compute the 'sad' reaction score '''
        return self.getReactionScore('sad')

    def getHahaScore(self):
        ''' Compute the 'haha' reaction score '''
        return self.getReactionScore('haha')

    def getLoveScore(self):
        ''' Compute the 'love' reaction score '''
        return self.getReactionScore('love')

    def getTotalReactionScore(self):

        ''' Compute the total reaction score '''

        self.prompt('{} Calculating Reaction Score . . .'.format(json.dumps(self.filter_dict)))

        try:
            total_reaction_score = self.getLikeScore() \
                                 + self.getAngryScore() \
                                 + self.getWowScore() \
                                 + self.getSadScore() \
                                 + self.getHahaScore() \
                                 + self.getLoveScore()

        except (ValueError, KeyError, TypeError) as er:
            logging.warning(er)
            total_reaction_score = None

        self.prompt('{} Reaction Score: '.format(json.dumps(self.filter_dict)) + str(total_reaction_score))

        return total_reaction_score

    def getAccountStatisticDocument(self):

        self.prompt('{} Creating score document . . .'.format(json.dumps(self.filter_dict)))

        update_document = {}

        update_document['page_id'] = self.filter_dict['page_id'].lower()
        update_document['page_fanCount'] = self.getFollowerCount()

        update_document['result.statistics'] = {}
        update_document['result.statistics']['postCount'] = self.getPostCount()
        update_document['result.statistics']['commentCount'] = self.getFieldSum('post_commentCount')
        update_document['result.statistics']['reshareCount'] = self.getFieldSum('post_shareCount')

        update_document['result.statistics']['reactions'] = {}
        update_document['result.statistics']['reactions']['like'] = self.getFieldSum('post_reactions.like')
        update_document['result.statistics']['reactions']['angry'] = self.getFieldSum('post_reactions.angry')
        update_document['result.statistics']['reactions']['wow'] = self.getFieldSum('post_reactions.wow')
        update_document['result.statistics']['reactions']['sad'] = self.getFieldSum('post_reactions.sad')
        update_document['result.statistics']['reactions']['haha'] = self.getFieldSum('post_reactions.haha')
        update_document['result.statistics']['reactions']['love'] = self.getFieldSum('post_reactions.love')

        self.prompt('{} Score document created!'.format(json.dumps(self.filter_dict)))

        return update_document

    def getAccountScoreDocument(self):

        self.prompt('{} Creating score document . . .'.format(json.dumps(self.filter_dict)))

        update_document = {}

        update_document['page_id'] = self.filter_dict['page_id'].lower()

        update_document['result.scores'] = {}

        update_document['result.scores']['popularity_likeScore'] = {}
        update_document['result.scores']['popularity_likeScore']['popularity_likeScore_1'] = self.getP1()
        update_document['result.scores']['popularity_likeScore']['popularity_likeScore_3'] = self.getP3()

        update_document['result.scores']['commitment_commentScore'] = {}
        update_document['result.scores']['commitment_commentScore']['commitment_commentScore_1'] = self.getC1()
        update_document['result.scores']['commitment_commentScore']['commitment_commentScore_3'] = self.getC3()

        update_document['result.scores']['virality_shareScore'] = {}
        update_document['result.scores']['virality_shareScore']['virality_shareScore_1'] = self.getV1()
        update_document['result.scores']['virality_shareScore']['virality_shareScore_3'] = self.getV3()

        update_document['result.scores']['engagement_index_score'] = self.getEngagementIndexScore()

        engagement_index_score_normalized = self.getAccountNormalizedEngagementIndexScore()
        update_document['result.scores']['engagement_index_score_normalized'] = engagement_index_score_normalized * 100 if engagement_index_score_normalized else None

        update_document['result.scores']['reaction_score'] = {}
        update_document['result.scores']['reaction_score']['total'] = self.getTotalReactionScore()

        self.prompt('{} Score document created!'.format(json.dumps(self.filter_dict)))

        return update_document

    def getAccountPostTypeScoreDocument(self):

        update_document = {}

        post_types = self.fmc.getPostTypeDistinct('post_type')

        for post_type in post_types:

            self.filter_dict.pop('post_type', None)
            posttypeattribute = {'post_type': post_type}
            posttypeattribute.update(self.filter_dict)

            super(FacebookScorer, self).__init__(
                posttypeattribute,
                FacebookMongoConnector(),
                'post_reactions.like',
                'post_commentCount',
                'post_shareCount',
                'page_id',
                'post_type'
            )

            self.prompt('{} Creating score document . . .'.format(json.dumps(self.filter_dict)))

            update_document['page_id'] = self.filter_dict['page_id'].lower()

            update_document['post_type_result.%s.scores' % (post_type)] = {}
            update_document['post_type_result.%s.scores' % (post_type)]['engagement_index_score'] = self.getEngagementIndexScore()

            self.prompt('{} Score document created!'.format(json.dumps(self.filter_dict)))

        return update_document

    def getPostTypeStatisticDocument(self):

        update_document = {}

        post_types = self.fmc.getPostTypeDistinct('post_type')

        for post_type in post_types:

            posttypeattribute = {'post_type': post_type}

            super(FacebookScorer, self).__init__(
                posttypeattribute,
                FacebookMongoConnector(),
                'post_reactions.like',
                'post_commentCount',
                'post_shareCount',
                'page_id',
                'post_type'
            )

            self.prompt('{} Creating statistic document . . .'.format(json.dumps(self.filter_dict)))

            update_document['_id'] = posttypeattribute['post_type']

            update_document['result.statistics'] = {}
            update_document['result.statistics']['postCount'] = self.getPostCount()

            self.prompt('{} Statistic document created!'.format(json.dumps(self.filter_dict)))

            self.mongo_connector_class.updatePostTypeResult(update_document)

    def getPostTypeScoreDocument(self):

        update_document = {}

        post_types = self.fmc.getPostTypeDistinct('post_type')

        for post_type in post_types:

            posttypeattribute = {'post_type': post_type}

            super(FacebookScorer, self).__init__(
                posttypeattribute,
                FacebookMongoConnector(),
                'post_reactions.like',
                'post_commentCount',
                'post_shareCount',
                'page_id',
                'post_type'
            )

            self.prompt('{} Creating score document . . .'.format(json.dumps(self.filter_dict)))

            update_document['_id'] = posttypeattribute['post_type']

            update_document['result.scores'] = {}
            update_document['result.scores']['engagement_index_score'] = self.getEngagementIndexScore()

            self.prompt('{} Score document created!'.format(json.dumps(self.filter_dict)))

            self.mongo_connector_class.updatePostTypeResult(update_document)
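For reference, a hedged sketch of the [SentimentScoreConfig] section that getReactionScore reads from egovbench_config.ini; the weights below are placeholders, not the project's real values. Each reaction score is the configured weight multiplied by that reaction's share of all reactions.

# Hypothetical sentiment weights and counts, for illustration only.
from configparser import ConfigParser

demo = ConfigParser()
demo.read_string("""
[SentimentScoreConfig]
like = 1.0
love = 1.0
haha = 0.5
wow = 0.5
sad = -0.5
angry = -1.0
""")

# Worked example: 120 likes out of 200 total reactions
like_count, total_count = 120, 200
like_score = float(demo.get('SentimentScoreConfig', 'like')) * (like_count / total_count)
print(like_score)  # 0.6
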
Exemple #45
0
class UIConfig(Singleton):
    import util
    cfg = util.GenCfgPath('option', 'ui.cfg')

    #	print cfg
    #	cfg = 'option/ui.cfg'
    def __init__(self):
        self.__init()
        self.modified = False

    def __init(self):
        self.file = self.opencfg(UIConfig.cfg)
        self.cfg = Parser()
        self.cfg.readfp(self.file)
        self.file.close()

    def __setdefault(self):
        f = open(self.fn, 'w')
        f.write(default_file)
        f.close()

    def opencfg(self, fn):
        self.fn = fn
        try:
            return open(fn, 'r')
        except IOError:
            self.__setdefault()
            return open(fn, 'r')

    def release(self):
        if self.modified:
            f = open(self.fn, 'w')
            self.cfg.write(f)
            f.close()

    def setDefault(self):
        self.__setdefault()
        self.__init()

    def getMaximized(self):
        return self.cfg.get(WINDOW, MAXIMIZE).lower() == 'true'

    @modified_decorator
    def setMaximized(self, value):
        self.cfg.set(WINDOW, MAXIMIZE, str(bool(value)))

    def getWindowSize(self):
        return list(map(int, str2list(self.cfg.get(WINDOW, SIZE))))

    @modified_decorator
    def setWindowSize(self, value):
        self.cfg.set(WINDOW, SIZE, \
         sep.join(map(str, value)))

    def getWindowPos(self):
        return list(map(int, str2list(self.cfg.get(WINDOW, POS))))

    @modified_decorator
    def setWindowPos(self, value):
        self.cfg.set(WINDOW, POS, \
         sep.join(map(str, value)))

    def getLeftSplitProp(self):
        return self.cfg.getfloat(SPLIT, LEFT_SPLIT)

    @modified_decorator
    def setLeftSplitProp(self, prop):
        self.cfg.set(SPLIT, LEFT_SPLIT, '%.2f' % prop)

    def getUpSplitProp(self):
        return self.cfg.getfloat(SPLIT, UP_SPLIT)

    @modified_decorator
    def setUpSplitProp(self, prop):
        self.cfg.set(SPLIT, UP_SPLIT, '%.2f' % prop)

    def getRightSplitProp(self):
        return self.cfg.getfloat(SPLIT, RIGHT_SPLIT)

    @modified_decorator
    def setRightSplitProp(self, prop):
        self.cfg.set(SPLIT, RIGHT_SPLIT, '%.2f' % prop)

    def getLastDir(self):
        return self.cfg.get(DIRCTRL, DIR)

    @modified_decorator
    def setLastDir(self, value):
        # ConfigParser in Python 3 expects str values, so store the path directly
        self.cfg.set(DIRCTRL, DIR, value)
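A hedged guess at the ui.cfg layout these getters assume; the section/option constants (WINDOW, MAXIMIZE, SIZE, POS, SPLIT, LEFT_SPLIT, UP_SPLIT, RIGHT_SPLIT, DIRCTRL, DIR) and the str2list/sep helpers are defined elsewhere in the module, so every name and value below is illustrative only.

# Illustrative default_file contents, not the project's real template.
default_file_example = """
[window]
maximize = False
size = 800,600
pos = 100,100

[split]
left_split = 0.25
up_split = 0.70
right_split = 0.30

[dirctrl]
dir = /home/user
"""
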
Exemple #46
0
class GPM( SearchGranules ):
    def __init__(self, prjName, prdLv, prdVer, **kwargs):
        '''
        prjName     : e.g.) 'GPM.KuPR'
        prdLv       : e.g.) 'L2'
        prdVer      : e.g.) '02'
        '''

        self.cfg        = SafeConfigParser( os.environ )
        self.cfg.read( 'config' )

        self.cfg._sections['Defaults'].update( kwargs )

        if self.cfg.get( 'Defaults','dataroot') == '':
            self.cfg.set('Defaults','dataroot', os.environ['PWD'])

        self.dataDir    = self.cfg.get('Defaults','dataroot')

        self.prjName    = prjName
        self.prdLv      = prdLv
        self.prdVer     = prdVer

        self.prdDir     = os.path.join( self.dataDir,
                                        self.prjName,
                                        self.prdLv,
                                        self.prdVer)

        self.cached     = self.cfg.get('Defaults', 'cached')
        self.cacheDir   = self.cfg.get('Defaults', 'cache_dir')

        fnPath          = {'TRMM': self.cfg.get('Defaults','hdf4_module'),
                           'GPM' : self.cfg.get('Defaults','hdf5_module')}[prjName.split('.')[0]]

        fnName          = fnPath.split('.')[-1]
        modPath         = '.'.join( fnPath.split('.')[:-1] )

        self.func_read  = getattr( importlib.import_module( modPath ), fnName )

        print(self.func_read)
        print(type(self.func_read))

        '''
        self.cacheDir   = os.path.join( self.dataDir,
                                        'cache.dim',
                                         self.prjName,
                                         self.prdLv,
                                         self.prdVer)

        self.prdDir     = '%s/%s/%s/%s'%(self.dataDir,
                                         self.prjName,
                                         self.prdLv,
                                         self.prdVer)

        self.cacheDir   = '%s/cache.dim/%s/%s/%s'%(self.dataDir,
                                         self.prjName,
                                         self.prdLv,
                                         self.prdVer)

        self.func_read  = {'TRMM': read_hdf4,
                           'GPM' : read_hdf5}[ prjName.split('.')[0] ]
        '''

        '''
        dictGrp = {'GPM.GMI':'S1',
                   'GPM.DPR':'NS',      # HS, MS, NS
                   'GPM.KaPR':'MS',     # HS, MS
                   'GPM.KuPR':'NS',}

        grpCode = dictGrp[ self.prjName ]
        '''



    def __call__(self, varName, sDTime, eDTime, BBox=None, res=None, delT=None):
        '''
        varName : variable name to read from each granule
        sDTime  : left (start) datetime bound
        eDTime  : right (end) datetime bound
        BBox    : optional bounding box
        res     : optional spatial resolution of the gridded 2-d output
        delT    : optional width of the time bins
        '''

        mapCode     = '^' + ''.join( str(res).split('.') )


        gpmData     = GPM_data()

        srcDir      = os.path.join( self.dataDir, self.prdDir )

        assert os.path.exists( srcDir ), '{} does not exist.'.format( srcDir )
        Granule     = self.search_granules( srcDir, sDTime, eDTime, BBox )

        outSize     = sum( [ len(gra[2]) for gra in Granule ] ), Granule[0][2].shape[1]
        Lat         = empty( outSize, 'float32')
        Lon         = empty( outSize, 'float32')
        aOut        = empty( outSize, 'float32' )
        DTime       = []


        prvI        = 0
        for granule in Granule:

            srcPath, dtime, lat, lon, idx   = granule

            gpmData.srcPath.append(srcPath)
            gpmData.recLen.append( len(dtime) )     # number of data records for each file

            nxtI            = prvI + len(dtime)

            aOut[prvI:nxtI] = self.func_read( srcPath, varName, idx.tolist() )
            Lat[prvI:nxtI]  = lat
            Lon[prvI:nxtI]  = lon
            DTime.extend(dtime)


            if res is not None and delT is None:
                gpmData.griddata.append( granule2map( lat, lon, aOut[prvI:nxtI], BBox, res ) )
                gpmData.grid    = GridCoordinates(mapCode, BBox=BBox)

            prvI    = nxtI


        if delT is not None:
            dtBnd   = dtrange(sDTime, eDTime, delT)

            gpmData.tbound  = list( zip( dtBnd[:-1], dtBnd[1:] ) )     # map(None, ...) is Python 2 only
            gpmData.dtime   = bin_bytbound( DTime, dtBnd, DTime )
            gpmData.lat     = bin_bytbound( DTime, dtBnd, Lat )
            gpmData.lon     = bin_bytbound( DTime, dtBnd, Lon )
            gpmData.data    = bin_bytbound( DTime, dtBnd, aOut )


            if res is not None:
                gpmData.griddata    = [ granule2map(lat, lon, a, BBox, res)
                                        for lat, lon, a in zip(gpmData.lat, gpmData.lon, gpmData.data) ]
                gpmData.grid    = GridCoordinates(mapCode, BBox=BBox)

        else:
            gpmData.dtime   = DTime
            gpmData.lat     = Lat
            gpmData.lon     = Lon
            gpmData.data    = aOut


        return gpmData
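A hedged usage sketch of the class above: the variable name 'precipRateESurface', the bounding box, and the date range are assumptions for illustration, and the call requires a populated data directory plus the 'config' file the constructor reads.

# Hypothetical call: KuPR near-surface rain over a small box,
# gridded at 0.1 degree and binned into 1-hour slots.
from datetime import datetime, timedelta

kupr = GPM('GPM.KuPR', 'L2', '02')
data = kupr('precipRateESurface',
            datetime(2014, 6, 1, 0), datetime(2014, 6, 1, 6),
            BBox=[[30.0, 120.0], [40.0, 130.0]],
            res=0.1,
            delT=timedelta(hours=1))
print(data.srcPath)        # granule files that were read
print(len(data.griddata))  # one grid per time bin
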
Exemple #47
0
config.read('config.ini')
sys.setrecursionlimit(10000)
mongoUtil = mongoUtil()

hidden_1 = config.getint('NeuralNetConfig', 'HIDDEN_1')
hidden_2 = config.getint('NeuralNetConfig', 'HIDDEN_2')
hidden_3 = config.getint('NeuralNetConfig', 'HIDDEN_3')
hidden_4 = config.getint('NeuralNetConfig', 'HIDDEN_4')

t_size = config.getfloat('Misc', 'TEST_SIZE_1')
t_size_2 = config.getfloat('Misc', 'TEST_SIZE_2')
rstate_2 = config.getint('Misc', 'RSTATE_1')
digit_num = config.getint('Misc', 'DIGIT_NUM')

rstate = config.getint('NeuralNetConfig', 'RSTATE')
activationFunc = config.get('NeuralNetConfig', 'ACTIVATION')
m_iter = config.getint('NeuralNetConfig', 'MAX_ITER')
l_rate = config.getfloat('NeuralNetConfig', 'LEARNING_RATE')
ver_bose = config.getboolean('NeuralNetConfig', 'VERBOSE')


class otc_model:
    def __init__(self,
                 model_type=1,
                 model_params={
                     'n_estimators': 200,
                     'max_depth': 10,
                     'max_features': 0.4
                 }):
        self.break_reason_mapping = {
            1: 1,
Exemple #48
0
PASSWORD =

[hostnames]
2=127.0.0.1
3=localhost
"""

cfg = SafeConfigParser()
cfg.readfp(StringIO(defaults))
cfg.read(os.path.join(BASE_DIR, "local.cfg"))

if cfg.getboolean("debug", "DEBUG") is True:
    print("WARNING: Not in production mode, If you are running on the server, stop right now")

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = cfg.get("general", "SECRET_KEY")

# Setting debug to false forces everything else into secure production settings
DEBUG = cfg.getboolean("debug", "DEBUG")

CSRF_COOKIE_SECURE = cfg.getboolean("debug", "CSRF_COOKIE_SECURE")
CSRF_COOKIE_HTTPONLY = cfg.getboolean("debug", "CSRF_COOKIE_HTTPONLY")
SESSION_COOKIE_SECURE = cfg.getboolean("debug", "SESSION_COOKIE_SECURE")

SECURE_BROWSER_XSS_FILTER = cfg.getboolean("debug", "SECURE_BROWSER_XSS_FILTER")

SECURE_CONTENT_TYPE_NOSNIFF = cfg.getboolean("debug", "SECURE_CONTENT_TYPE_NOSNIFF")

X_FRAME_OPTIONS = cfg.get("general", "X_FRAME_OPTIONS")

# We don't have any default hostnames for debug
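The fragment above layers a built-in defaults string underneath an optional local.cfg override file; a minimal sketch of the same idea, with an invented section and option:

# Built-in fallbacks first, then local overrides win when the file exists.
from configparser import ConfigParser

defaults = """
[debug]
DEBUG = false
"""

cfg = ConfigParser()
cfg.read_string(defaults)   # built-in fallbacks
cfg.read('local.cfg')       # silently skipped if the file is absent
print(cfg.getboolean('debug', 'DEBUG'))
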
Exemple #49
0
    

    inParamList = sys.argv[1].split(',')
    expName = sys.argv[2]
    gridName = sys.argv[3]
    calName = sys.argv[4]
    calFile = sys.argv[5]

    # Let's read in all parameters that can be varied by looking
    # for those that have step sizes specified. All the others
    # only have fiducials.
    iniFile = "input/pipeline.ini"
    Config = SafeConfigParser()
    Config.optionxform=str
    Config.read(iniFile)
    bigDataDir = Config.get('general','bigDataDirectory')

    manualParamList = Config.get('general','manualParams').split(',')

    paramList = [] # the parameters that can be varied
    fparams = {}   # the fiducial parameter values
    stepSizes = {}
    for (key, val) in Config.items('params'):
        if key in manualParamList: continue
        if ',' in val:
            param, step = val.split(',')
            paramList.append(key)
            fparams[key] = float(param)
            stepSizes[key] = float(step)
        else:
            fparams[key] = float(val)
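For context, a hedged sketch of the [params] layout this loop expects: a value containing a comma carries "fiducial,step" and marks a varied parameter, while a plain value is fiducial-only. The parameter names and numbers below are invented.

# Hypothetical pipeline.ini fragment illustrating the parsing above.
from configparser import ConfigParser

demo = ConfigParser()
demo.optionxform = str   # keep parameter names case-sensitive, as above
demo.read_string("""
[params]
omch2 = 0.1194,0.0006
H0 = 67.0,0.5
tau = 0.06
""")

for key, val in demo.items('params'):
    if ',' in val:
        fid, step = val.split(',')
        print(key, 'varied:', float(fid), '+/-', float(step))
    else:
        print(key, 'fixed:', float(val))
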
Exemple #50
0
                           datetime.now().strftime('%d-%m-%Y_%H-%M-%S'))
try:
    os.makedirs(results_dir)
except OSError as e:
    if e.errno != errno.EEXIST:
        raise  # This was not a "directory already exists" error.

# Copy settings to the result directory
shutil.copytree('Setup_Files', results_dir + '/Setup_Files_used_for_run')
shutil.copytree('App_data', results_dir + '/App_data_used_for_run')

f = open(results_dir + "/Results.txt", "w+")
fp = open(results_dir + "/False_positive_results.txt", "w+")

# Parse CONFIG
URL_encoding = parser.get("Config", "Remove_UTF-8_URL_encoding")
ASCII_encoding_in_get_requests = parser.get("Config",
                                            "ASCII_encoding_in_get_requests")

# Parse DISPLAY
Line_number = parser.get("Display", "Line_number")
Client_IP = parser.get("Display", "Client_IP")
Information_avalibility = parser.get("Display", "Information_avalibility")
User_ID = parser.get("Display", "User_ID")
Time = parser.get("Display", "Time")
Request_line = parser.get("Display", "Request_line")
Status_code = parser.get("Display", "Status_code")
Size = parser.get("Display", "Size")
Ref_display = parser.get("Display", "Referrer")
User_Agent = parser.get("Display", "User_Agent")
Protacol = parser.get("Display", "Protacol")
Exemple #51
0
class MakefileParser(object):
    def __init__(self):
        self.maxDiff = None
        self.parser = SafeConfigParser()
        if not os.path.exists('otbcfg.ini'):
            raise Exception(
                "OTB_SOURCE_DIR and OTB_BINARY_DIR must be specified in the file otbcfg.ini"
            )
        # only read the file once we know it exists
        self.parser.read('otbcfg.ini')

        self.root_dir = self.parser.get('otb', 'checkout_dir')
        if not os.path.exists(self.root_dir):
            raise Exception(
                "Check otbcfg.ini : OTB_SOURCE_DIR and OTB_BINARY_DIR must be specified there"
            )
        self.build_dir = self.parser.get('otb', 'build_dir')
        if not os.path.exists(self.build_dir):
            raise Exception(
                "Check otbcfg.ini : OTB_SOURCE_DIR and OTB_BINARY_DIR must be specified there"
            )
        self.logger = get_OTB_log()

    def test_CMakelists(self):
        provided = {}
        provided["OTB_SOURCE_DIR"] = self.root_dir
        provided["OTB_BINARY_DIR"] = self.build_dir
        provided["OTB_DATA_LARGEINPUT_ROOT"] = os.path.normpath(
            os.path.join(self.root_dir, "../OTB-Data/Input"))

        try:
            with open(os.path.join(self.root_dir,
                                   "CMakeLists.txt")) as file_input:
                content = file_input.read()
                output = parse(content)

                defined_paths = [
                    each for each in output if 'Command' in str(type(each))
                    and "FIND_PATH" in each.name
                ]
                the_paths = {
                    key.body[0].contents:
                    [thing.contents for thing in key.body[1:]]
                    for key in defined_paths
                }

                the_sets = [
                    each for each in output if 'Command' in str(type(each))
                    and "SET" in each.name.upper()
                ]
                the_sets = {
                    key.body[0].contents:
                    [thing.contents for thing in key.body[1:]]
                    for key in the_sets
                }
                the_sets = {key: " ".join(the_sets[key]) for key in the_sets}

                the_strings = set([
                    each.body[-1].contents for each in output
                    if 'Command' in str(type(each))
                    and "STRING" in each.name.upper()
                ])

                def mini_clean(item):
                    if item.startswith('"') and item.endswith(
                            '"') and " " not in item:
                        return item[1:-1]
                    return item

                the_sets = {key: mini_clean(the_sets[key]) for key in the_sets}

                def templatize(item):
                    if "$" in item:
                        return Template(item)
                    return item

                for key in the_sets:
                    if key in the_strings:
                        the_sets[key] = the_sets[key].lower()

                the_sets = {key: templatize(the_sets[key]) for key in the_sets}

                for path in the_paths:
                    target_file = the_paths[path][1]
                    suggested_paths = []
                    if len(the_paths[path]) > 2:
                        suggested_paths = the_paths[path][2:]

                    try:
                        provided[path] = find_file(target_file)
                    except Exception as e:
                        for each in suggested_paths:
                            st = Template(each)
                            pac = os.path.abspath(st.safe_substitute(provided))
                            if os.path.exists(pac):
                                provided[path] = pac
                                break

                resolve_dict(provided, the_sets)
                provided.update(the_sets)

                return provided
        except Exception as e:
            traceback.print_exc()
            self.fail(str(e))

    def add_make(self, previous_context, new_file):
        input = open(new_file).read()
        output = parse(input)
        apps = [each for each in output if 'Command' in str(type(each))]
        setcommands = [each for each in apps if 'SET' in each.name.upper()]
        stringcommands = [
            each for each in apps if 'STRING' in each.name.upper()
        ]

        environment = previous_context

        def mini_clean(item):
            if item.startswith('"') and item.endswith('"') and " " not in item:
                return item[1:-1]
            return item

        new_env = {}
        for command in setcommands:
            key = command.body[0].contents
            ct = " ".join([item.contents for item in command.body[1:]])
            ct = mini_clean(ct)

            if "$" in ct:
                values = Template(ct)
            else:
                values = ct

            new_env[key] = values

        for stringcommand in stringcommands:
            key = stringcommand.body[-1].contents
            ct = stringcommand.body[-2].contents
            ct = mini_clean(ct.lower())

            if "$" in ct:
                values = LowerTemplate(ct)
            else:
                values = ct
            new_env[key] = values

        resolve_dict(environment, new_env)
        environment.update(new_env)

        return environment

    def get_apps(self, the_makefile, the_dict):
        input = open(the_makefile).read()
        output = parse(input)
        apps = [each for each in output if 'Command' in str(type(each))]
        otb_apps = [
            each for each in apps
            if 'OTB_TEST_APPLICATION' in each.name.upper()
        ]
        return otb_apps

    def get_tests(self, the_makefile, the_dict):
        input = open(the_makefile).read()
        output = parse(input)
        apps = [each for each in output if 'Command' in str(type(each))]
        otb_tests = [each for each in apps if 'ADD_TEST' in each.name.upper()]
        return otb_tests

    def get_apps_with_context(self, the_makefile, the_dict):
        input = open(the_makefile).read()
        output = parse(input)

        def is_a_command(item):
            return 'Command' in str(type(item))

        appz = []
        context = []
        for each in output:
            if is_a_command(each):
                if 'FOREACH' in each.name and 'ENDFOREACH' not in each.name:
                    args = [item.contents for item in each.body]
                    context.append(args)
                elif 'ENDFOREACH' in each.name:
                    context.pop()
                elif 'OTB_TEST_APPLICATION' in each.name.upper():
                    appz.append((each, context[:]))
        return appz

    def get_name_line(self, the_list, the_dict):
        items = ('NAME', 'APP', 'OPTIONS', 'TESTENVOPTIONS', 'VALID')
        itemz = [[], [], [], [], []]
        last_index = 0
        for each in the_list:
            if each.contents in items:
                last_index = items.index(each.contents)
            else:
                itemz[last_index].append(each.contents)
        result = itemz[0][0]
        the_string = Template(result).safe_substitute(the_dict)

        if '$' in the_string:
            neo_dict = the_dict
            the_string = Template(the_string).safe_substitute(neo_dict)
            while '$' in the_string:
                try:
                    the_string = Template(the_string).substitute(neo_dict)
                except KeyError as e:
                    self.logger.warning("Key %s is not found in makefiles" %
                                        str(e))
                    neo_dict[str(e)] = ""

        if 'string.Template' in the_string:
            raise Exception("Unexpected toString call in %s" % the_string)

        return the_string

    def get_command_line(self, the_list, the_dict):
        items = ('NAME', 'APP', 'OPTIONS', 'TESTENVOPTIONS', 'VALID')
        itemz = [[], [], [], [], []]
        last_index = 0
        for each in the_list:
            if each.contents in items:
                last_index = items.index(each.contents)
            else:
                itemz[last_index].append(each.contents)
        result = []
        result.extend(["otbcli_%s" % each for each in itemz[1]])

        # "otbcli_" alone is 7 characters, so a length of 7 means the app name is empty
        if len(result[0]) == 7:
            raise Exception("App name is empty!")

        result.extend(itemz[2])
        result.append("-testenv")
        result.extend(itemz[3])
        the_string = Template(" ".join(result)).safe_substitute(the_dict)

        if '$' in the_string:
            neo_dict = the_dict
            the_string = Template(" ".join(result)).safe_substitute(neo_dict)
            while '$' in the_string:
                try:
                    the_string = Template(the_string).substitute(neo_dict)
                except KeyError as e:
                    self.logger.warning("Key %s is not found in makefiles" %
                                        str(e))
                    neo_dict[str(e)] = ""

        if 'string.Template' in the_string:
            raise Exception("Unexpected toString call in %s" % the_string)

        return the_string

    def get_test(self, the_list, the_dict):
        items = ('NAME', 'APP', 'OPTIONS', 'TESTENVOPTIONS', 'VALID')
        itemz = [[], [], [], [], []]
        last_index = 0
        for each in the_list:
            if each.contents in items:
                last_index = items.index(each.contents)
            else:
                itemz[last_index].append(each.contents)
        result = ["otbTestDriver"]
        result.extend(itemz[4])

        if len(result) == 1:
            return ""

        the_string = Template(" ".join(result)).safe_substitute(the_dict)

        if '$' in the_string:
            neo_dict = the_dict
            the_string = Template(" ".join(result)).safe_substitute(neo_dict)
            while '$' in the_string:
                try:
                    the_string = Template(the_string).substitute(neo_dict)
                except KeyError as e:
                    self.logger.warning("Key %s is not found in makefiles" %
                                        str(e))
                    neo_dict[str(e)] = ""

        if 'string.Template' in the_string:
            raise Exception("Unexpected toString call in %s" % the_string)

        return the_string

    def test_algos(self):
        tests = {}

        algos_dir = os.path.join(self.root_dir, "Testing/Applications")
        makefiles = find_files("CMakeLists.txt", algos_dir)
        to_be_excluded = os.path.join(self.root_dir,
                                      "Testing/Applications/CMakeLists.txt")
        if to_be_excluded in makefiles:
            makefiles.remove(to_be_excluded)

        resolve_algos = {}
        for makefile in makefiles:
            intermediate_makefiles = []
            path = makefile.split(os.sep)[len(self.root_dir.split(os.sep)):-1]
            for ind in range(len(path)):
                tmp_path = path[:ind + 1]
                tmp_path.append("CMakeLists.txt")
                tmp_path = os.sep.join(tmp_path)
                candidate_makefile = os.path.join(self.root_dir, tmp_path)
                if os.path.exists(candidate_makefile):
                    intermediate_makefiles.append(candidate_makefile)
            resolve_algos[makefile] = intermediate_makefiles

        dict_for_algo = {}
        for makefile in makefiles:
            basic = self.test_CMakelists()
            last_context = self.add_make(
                basic,
                os.path.join(self.root_dir,
                             "Testing/Utilities/CMakeLists.txt"))
            for intermediate_makefile in resolve_algos[makefile]:
                last_context = self.add_make(last_context,
                                             intermediate_makefile)
            dict_for_algo[makefile] = last_context

        for makefile in makefiles:
            appz = self.get_apps_with_context(makefile,
                                              dict_for_algo[makefile])

            for app, context in appz:
                if len(context) == 0:
                    import copy
                    ddi = copy.deepcopy(dict_for_algo[makefile])
                    tk_dict = autoresolve(ddi)
                    tk_dict = autoresolve(tk_dict)

                    name_line = self.get_name_line(app.body, tk_dict)
                    command_line = self.get_command_line(app.body, tk_dict)
                    test_line = self.get_test(app.body, tk_dict)

                    if '$' in test_line or '$' in command_line:
                        if '$' in command_line:
                            self.logger.error(command_line)
                        if '$' in test_line:
                            self.logger.warning(test_line)
                    else:
                        tests[name_line] = (command_line, test_line)
                else:
                    contexts = {}
                    for iteration in context:
                        key = iteration[0]
                        values = [each[1:-1].lower() for each in iteration[1:]]
                        contexts[key] = values

                    keyorder = list(contexts.keys())
                    import itertools
                    pool = [
                        each
                        for each in itertools.product(*list(contexts.values()))
                    ]

                    import copy
                    for poolinstance in pool:
                        neo_dict = copy.deepcopy(dict_for_algo[makefile])
                        zipped = list(zip(keyorder, poolinstance))
                        for each in zipped:
                            neo_dict[each[0]] = each[1]

                        ak_dict = autoresolve(neo_dict)
                        ak_dict = autoresolve(ak_dict)
                        ak_dict = autoresolve(ak_dict)

                        ddi = ak_dict

                        name_line = self.get_name_line(app.body, ddi)
                        command_line = self.get_command_line(app.body, ddi)
                        test_line = self.get_test(app.body, ddi)

                        if '$' in command_line or '$' in test_line:
                            if '$' in command_line:
                                self.logger.error(command_line)
                            if '$' in test_line:
                                self.logger.warning(test_line)
                        else:
                            tests[name_line] = (command_line, test_line)

        return tests
  if if_data is None:
    continue
  ip_addr = if_data[0]['addr']
  if ip_addr.split('.')[0] == '192':
    my_ip_address = ip_addr

if my_ip_address is None:
  print("Unable to Get Local Ip Address")
  print("Exit")
  sys.exit()

print("My Local Ip Address: " + my_ip_address)

# List of Records
response = client.list_resource_record_sets(
    HostedZoneId=config.get('main', 'HostedZoneId'),
    MaxItems='10'
)

pp = pprint.PrettyPrinter(indent=4)
pp.pprint(response['ResourceRecordSets'])

# Update Record
response = client.change_resource_record_sets(
  HostedZoneId=config.get('main', 'HostedZoneId'),
  ChangeBatch={
    'Changes': [
      {
        'Action': 'UPSERT',
        'ResourceRecordSet': {
          'Name': config.get('main', 'NameValue'),
Exemple #53
0
def main():  # pragma: no cover
    config = SafeConfigParser()
    dirs = ('.', '/etc', '/usr/local/etc')
    if not config.read([os.path.join(dir, config_file) for dir in dirs]):
        sys.exit('Could not find {} in {}'.format(config_file, dirs))

    try:
        logfile = config.get('logging', 'file')
        rotating = config.getboolean('logging', 'rotate', fallback=False)
        if rotating:
            max_size = config.getint('logging', 'max_size', fallback=1048576)
            backup_count = config.getint('logging', 'backup_count', fallback=5)
            handler = logbook.RotatingFileHandler(logfile,
                                                  max_size=max_size,
                                                  backup_count=backup_count)
        else:
            handler = logbook.FileHandler(logfile)
        handler.push_application()
    except:
        logbook.StderrHandler().push_application()

    try:
        kwargs = dict(config.items('mongodb'))
    except NoSectionError:
        sys.exit('No "mongodb" section in config file')
    args = []
    for arg in ('hosts', 'database', 'username', 'password'):
        try:
            args.append(config.get('mongodb', arg))
        except NoOptionError:
            sys.exit(
                'No "{}" setting in "mongodb" section of config file'.format(
                    arg))
        kwargs.pop(arg)
    args[0] = [s.strip() for s in args[0].split(',')]
    store = MongoStore(*args, **kwargs)

    try:
        email_sender = config.get('email', 'sender')
    except NoSectionError:
        sys.exit('No "email" section in config file')
    except NoOptionError:
        sys.exit('No "sender" setting in "email" section of config file')

    business_logic = BusinessLogic(store, email_sender)

    try:
        listen_port = int(config.get('wsgi', 'port'))
        log.info('Binding to port {}'.format(listen_port))
    except:
        listen_port = 80
        log.info('Binding to default port {}'.format(listen_port))

    try:
        auth_key = config.get('wsgi', 'auth_key')
        log.info('Server authentication enabled')
    except:
        log.warning('Server authentication DISABLED')
        auth_key = None

    httpd = make_server('',
                        listen_port,
                        partial(application, business_logic, auth_key),
                        handler_class=LogbookWSGIRequestHandler)
    business_logic.schedule_next_deadline()
    httpd.serve_forever()
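A hedged sketch of the config file layout main() expects; the section and option names come from the code above, while every value is a placeholder.

# Placeholder config matching the sections read above: logging, mongodb, email, wsgi.
from configparser import ConfigParser

sample = """
[logging]
file = /var/log/app.log
rotate = true
max_size = 1048576
backup_count = 5

[mongodb]
hosts = localhost:27017
database = appdb
username = appuser
password = secret

[email]
sender = alerts@example.com

[wsgi]
port = 8080
auth_key = changeme
"""

check = ConfigParser()
check.read_string(sample)
print(check.get('mongodb', 'database'))  # appdb
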
Exemple #54
0
lstFile = os.path.join(cfgDir, 'podcasts.list')
if not os.path.isfile(cfgFile):
    source = pkg_resources.resource_stream(__name__, 'config/podcatch.conf')
    with open(cfgFile, 'w') as dest:
        dest.writelines(source)
if not os.path.isfile(lstFile):
    source = pkg_resources.resource_stream(__name__, 'config/podcasts.list')
    with open(lstFile, 'w') as dest:
        dest.writelines(source)

paramParser = SafeConfigParser()
paramParser.read(cfgFile)

# General
podcastListPath = os.path.expanduser(
    str(paramParser.get('General', 'podcastListPath')))
if not os.path.isabs(podcastListPath):
    podcastListPath = os.path.join(cfgDir, podcastListPath)

defaultPodDirPath = os.path.expanduser(
    str(paramParser.get('General', 'defaultPodDirPath')))
defaultPodDirPath = defaultPodDirPath + ('' if defaultPodDirPath.endswith('/')
                                         else '/')
if not os.path.isabs(defaultPodDirPath):
    raise ValueError('The defaultPodDirPath parameter in ' + cfgFile +
                     ' should be an absolute path.')

# Podcasts
try:
    maxNbEpisodes = int(paramParser.get('Podcasts', 'maxNbEpisodes'))
except:
def get_base_config(ROOTDIR):
    """Parse config file into a configuration object.

    Returns:
    - result: (type: baseObj) configuration object.

    baseObj:
    - user_agent: (type: string) user agent used by Python requests.
    - output_folder: (type: string) temporary folder for downloaded files.
    - geolite_db: (type: string) GeoLite city DB file path.
    - asn_db: (type: string) GeoLite ASN DB file path.
    - date_format: (type: string) format of date stamps sent to Viper.
    - redirect_limit: (type: int) number of HTTP redirects to handle before aborting.
    - use_tor: (type: string) Use Tor for all malware downloads?
    - tor_ip: (type: string) IP that Tor is listening on.
    - tor_port: (type: string) Port that Tor is listening on.
    - hash_count_limit: (type: int) Number of copies of a unique file to permit.
    - url_char_limit: (type: int) Character limit for acceptable URLs.
    - vt_key: (type: string) VirusTotal API key.
    - vt_user: (type: string) VirusTotal username.
    - vt_req_min: (type: int) VirusTotal requests per minute limit.
    - vt_score_min: (type: int) minimum VirusTotal score to accept.
    - vt_preferred_engines: (type: string list) comma separated list of preferred VirusTotal engines.
    - malware_days: (type: int) general number of days back to consider malware valid for.
    - tag_samples: (type: string) Tag samples using VirusTotal?
    - blacklisted_tags: (type: string list) comma separated list of blacklisted malware family strings.
    - malware_workers: (type: int) number of wild file processors to spawn.
    - viper_add_url: (type: string) URL of Viper entry addition API.
    - viper_token: (type: string) Django REST API token.
    """
    parser = SafeConfigParser()
    parser.read(
        os.path.join(os.path.dirname(__file__), 'config', 'settings.conf'))

    user_agent = parser.get('Core', 'useragent')
    output_folder = parser.get('Core', 'outputfolder')
    geolite_db = parser.get('Core', 'geolitedb')
    asn_db = parser.get('Core', 'asndb')
    date_format = parser.get('Core', 'dateformat')
    redirect_limit = parser.get('Core', 'redirectlimit')
    use_tor = parser.get('Core', 'usetor')
    tor_ip = parser.get('Core', 'torip')
    tor_port = parser.get('Core', 'torport')
    hash_count_limit = parser.get('Core', 'hashcountlimit')
    url_char_limit = parser.get('Core', 'urlcharlimit')
    vt_key = parser.get('VirusTotal', 'apikey')
    vt_user = parser.get('VirusTotal', 'username')
    vt_req_min = parser.get('VirusTotal', 'requestsperminute')
    vt_score_min = parser.get('VirusTotal', 'scoreminimum')
    vt_preferred_engines = parser.get('VirusTotal', 'preferredengines')
    malware_days = parser.get('Malware', 'malwaredays')
    tag_samples = parser.get('Malware', 'tagsamples')
    blacklisted_tags = parser.get('Malware', 'blacklistedtags')
    malware_workers = parser.get('Malware', 'workers')
    viper_add_url = parser.get('Viper', 'addurl')
    viper_token = parser.get('Viper', 'apitoken')

    return baseObj(user_agent, output_folder, geolite_db, asn_db, date_format,
                   redirect_limit, use_tor, tor_ip, tor_port, hash_count_limit,
                   url_char_limit, vt_key, vt_user, vt_req_min, vt_score_min,
                   vt_preferred_engines, malware_days, tag_samples,
                   blacklisted_tags, malware_workers, viper_add_url,
                   viper_token)
Exemple #56
0
#!/usr/bin/python
from configparser import SafeConfigParser
import praw
import random
import logging

logging.basicConfig(filename="simplelog.log",  format='%(asctime)s %(levelname)s %(message)s',level=logging.INFO)

config = SafeConfigParser()
config.read('config.ini')
tom_bot = praw.Reddit(user_agent=config.get('Bot', 'user_agent'),
				      client_id=config.get('Bot', 'client_id'),
				      client_secret=config.get('Bot', 'client_secret'),
				      username=config.get('Bot', 'username'),
				      password=config.get('Bot', 'password'))

AllNewMessages = tom_bot.inbox.unread(limit=None)
for message in AllNewMessages:
    if "irishfact" in message.body.lower():
        tom_bot.inbox.mark_read([message])
        what_user = str(message.author)
        # logging was already configured above, so just record who was answered
        logging.info('Message sent to: ' + what_user)
        mgs = random.choice(list(open('facts.txt')))
        tom_bot.redditor(what_user).message('random Fact', 'Hello ' + what_user + ',\n\n Here is your Irish random fact: \n\n' + mgs)
        with open(config_file) as f:
            config.readfp(f, config_file)
    except OSError:
        pass
    for option in (
            "jid",
            "jabber_password",
            "conference_domain",
            "mode",
            "zulip_email_suffix",
            "jabber_server_address",
            "jabber_server_port",
    ):
        if getattr(options, option) is None and config.has_option(
                "jabber_mirror", option):
            setattr(options, option, config.get("jabber_mirror", option))

    for option in ("no_use_tls", ):
        if getattr(options, option) is None:
            if config.has_option("jabber_mirror", option):
                setattr(options, option,
                        config.getboolean("jabber_mirror", option))
            else:
                setattr(options, option, False)

    if options.mode is None:
        options.mode = "personal"

    if options.zulip_email_suffix is None:
        options.zulip_email_suffix = ""
Exemple #58
0
from configparser import SafeConfigParser
from syslog import syslog, LOG_ERR, LOG_INFO
from subprocess import Popen, STDOUT
import sys
import os
import json

# Extract config
configparser = SafeConfigParser()
try:
    configparser.read('/etc/packer-utils/config.ini')
    UUID_FILE = configparser.get('rally-image-testing', 'UUID_FILE')
    RALLY_JSON_RESULTS = configparser.get('rally-image-testing',
                                          'RALLY_JSON_RESULTS')
except Exception as e:
    syslog(LOG_ERR, 'Unable to read from config file')
    syslog(LOG_ERR, repr(e))
    sys.exit(1)


class RallyTaskAnalysis:
    def test_analysis(self):
        task_uuid = self.get_task_uuid()
        json_results = self.get_json_data(task_uuid)

        image_ok = True
        self.analyse_json_data(json_results)

        # Making results of this data analysis clearer in the logging
        image_results_header = [
            '-----------------------------------------',
Exemple #59
0
    newshape = np.concatenate([[L, 1] for L in shape])
    d = np.reshape(d, newshape)
    # And tile
    d = np.tile(d, np.concatenate([[1, s] for s in fullsteps]))
    # Finally reshape back to proper dimensionality
    return np.reshape(d, np.array(shape) * np.array(fullsteps))


#outDir = "/gpfs01/astro/www/msyriac/web/work/"
outDir = "/Users/nab/Desktop/"

iniFile = "input/pipeline.ini"
Config = SafeConfigParser()
Config.optionxform = str
Config.read(iniFile)
bigDataDir = Config.get('general', 'bigDataDirectory')
clttfile = Config.get('general', 'clttfile')

gridName = "grid-owl2"
#gridName = "grid-default"
#version = "0.3_ysig_0.127"
version = ["1.1", "1.11"]  #,"1.1"]#"0.7"
cal = "owl2"  #"CMB_all"
#cal = "CMB_all_PICO"
#cal = "CMB_pol_miscentered"

from orphics.io import dict_from_section, list_from_config
constDict = dict_from_section(Config, 'constants')
clusterDict = dict_from_section(Config, 'cluster_params')

fparams = {}  # the fiducial parameter values
Exemple #60
0
import unidecode

from shapely.geometry import Point, Polygon
from preprocess import checkmonth, replace_char_to_number
from preprocess import LIST_FIRSTNAME_PREPROCESS, LIST_MIDNAME_PREPROCESS

from Matchfield import bankMatch, firstnameMatch, midnameMatch, typecardMatch

from flask import Flask, render_template, jsonify, request, Response, send_from_directory
from flask_restful import Resource, Api
from flask_cors import CORS

#####LOAD CONFIG####
config = SafeConfigParser()
config.read("config/services.cfg")
LOG_PATH = str(config.get('main', 'LOG_PATH'))
SERVER_IP = str(config.get('main', 'SERVER_IP'))
SERVER_PORT = int(config.get('main', 'SERVER_PORT'))
UPLOAD_FOLDER = str(config.get('main', 'UPLOAD_FOLDER'))
RESULT_FOLDER = str(config.get('main', 'RESULT_FOLDER'))
RECOG_FOLDER = str(config.get('main', 'RECOG_FOLDER'))

if not os.path.exists(LOG_PATH):
    os.mkdir(LOG_PATH)

if not os.path.exists(UPLOAD_FOLDER):
    os.mkdir(UPLOAD_FOLDER)

if not os.path.exists(RESULT_FOLDER):
    os.mkdir(RESULT_FOLDER)