Example #1
    def load_cpu_obj(self):
        '''
            Load the CPU object from config.
            Return:
                Object populated with processor details : SUCCESS
                False : FAILURE

            Author : Suren Kumar Moorthy
        '''
        try:
            logger.info("Loading CPU object from Config")
            self.handle.execute_cmd_list("top", "scope chassis")
            out = self.handle.execute_cmd("show cpu detail")
            regex = r'(?:(?:Xeon\(R\)\s*CPU\s+([a-zA-Z0-9\s\-]+)(?:\s+)?\@)|(?:Xeon\(R\)\s*([a-zA-Z0-9\s\-]+)\s+CPU))'
            model_reg_out = re.search(regex, out)
            model = str(model_reg_out.group(2)) if model_reg_out.group(1) is None else str(model_reg_out.group(1))
            # Trim trailing whitespace, then collapse internal whitespace to hyphens
            model = re.sub(r'\s+$', '', model)
            model = re.sub(r'\s+', '-', model)
            logger.info('Model selected: ' + model)
            con = ConfigParser()
            processor_config = con.load_processor_config(model).proceesor_details
            return processor_config
        except:
            dump_error_in_lib()
            return False
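For reference, a minimal standalone sketch of the model-name normalization above; the sample "show cpu detail" line and the printed result are assumptions for illustration:

import re

out = 'Product Name: Intel(R) Xeon(R) CPU E5-2680 v4 @ 2.40GHz'  # hypothetical sample line
regex = r'(?:(?:Xeon\(R\)\s*CPU\s+([a-zA-Z0-9\s\-]+)(?:\s+)?\@)|(?:Xeon\(R\)\s*([a-zA-Z0-9\s\-]+)\s+CPU))'
m = re.search(regex, out)
model = m.group(1) if m.group(1) is not None else m.group(2)
model = re.sub(r'\s+$', '', model)  # trim trailing whitespace
model = re.sub(r'\s+', '-', model)  # collapse internal whitespace to hyphens
print(model)  # -> E5-2680-v4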
Example #2
def main(config: ConfigParser):
    logger = config.get_logger("train")

    # setup data_loader instances
    data_loader = config.initialize("train_data_loader", module_data)
    valid_data_loader = config.initialize("val_data_loader", module_data)

    # build model architecture, then print to console
    model = config.initialize("arch", module_arch)
    logger.info(model)

    # get function handles of loss and metrics
    loss = getattr(module_loss, config["loss"])
    metrics = [getattr(module_metric, met) for met in config["metrics"]]

    # build optimizer and learning rate scheduler.
    # delete every line containing lr_scheduler to disable the scheduler
    trainable_params = filter(lambda p: p.requires_grad, model.parameters())
    # optimizer = config.initialize("optimizer", module_optim, trainable_params)
    optimizer = config.initialize("optimizer", module_optim)
    optimizer.set_parameters(trainable_params)

    trainer = Trainer(
        model,
        loss,
        metrics,
        optimizer,
        config=config,
        data_loader=data_loader,
        valid_data_loader=valid_data_loader,
    )

    trainer.train()
Example #3
def main(args=None):
    commandline_parser = CommandlineParser()
    commandline_parameters = commandline_parser.parse()
    config_parser = ConfigParser()
    if os.path.isfile(config_parser.path):
        config_parameters = config_parser.parse()
        parameters = config_parameters + commandline_parameters
    else:
        parameters = commandline_parameters
    repository_combiner.combine(parameters)
Example #4
	def fill(self, target, args):
		targetElement, filePath = args
		filePath = ''.join(filePath)
		# add base dir for config file
		filePath = self.configPath + "/" + filePath
		parser = ConfigParser(filePath)
		alt_dict = parser.parseFile()
		element, attr = targetElement.split(' ')
		if target.has_attr('hash'):
			targetHash = target['hash'].encode('ascii')
			if alt_dict.has_key(targetHash):  # skip replacement for content without a configured entry
				target[attr] = alt_dict[targetHash]
Example #5
class defaultValueHandler:
    def __init__(self, FILE_NAME):
        self.feature_obj = ConfigParser(FILE_NAME)

    def getConfigKeyList(self):
        return self.feature_obj.config_key_list

    def getConfigMap(self):
        return self.feature_obj.config_map

    def setConfigMap(self, new_dict):
        self.feature_obj.update_dictionary(new_dict)
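A hypothetical usage of this wrapper; the file name and the key/value are assumptions for illustration:

handler = defaultValueHandler('feature.cfg')   # hypothetical config file
print(handler.getConfigKeyList())
cfg = handler.getConfigMap()
cfg['timeout'] = '30'                          # hypothetical key/value
handler.setConfigMap(cfg)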
Example #6
    def __init__(self):
        data = {'source': {'operation': 'init', 'timestamp': time.time()},
                'type': 'root',
                'policy_path': []}
        self.policy = ConfigTree(data, sort_key, key_to_string)
        self.reference = ConfigTree(data, sort_key, key_to_string)

        self._parser = ConfigParser()
        self._policy_parser = PolicyParser()
        self._pattern_parser = PatternParser()
        self._configs = [ConfigTree(data, sort_key, key_to_string)]
        self._load_policy()
Example #7
def main(config: ConfigParser, resume: str):
    # setup data_loader instances
    data_loader = getattr(module_data, config['data_loader']['type'])(
        src_path=config['data_loader']['args']['test_src_path'],
        tgt_path=config['data_loader']['args']['test_tgt_path'],
        src_preprocessor_path=config['data_loader']['args']['src_preprocessor_path'],
        tgt_preprocessor_path=config['data_loader']['args']['tgt_preprocessor_path'],
        batch_size=config['data_loader']['args']['batch_size'],
        shuffle=False,
        validation_split=0.0,
        num_workers=1,
    )

    # build model architecture
    model = config.initialize('arch', module_arch)
    print(model)

    # load state dict
    print(f'Loading checkpoint: {resume}')
    checkpoint = torch.load(resume)
    state_dict = checkpoint['state_dict']
    if config['n_gpu'] > 1:
        model = torch.nn.DataParallel(model, device_ids=config.device)
    model.load_state_dict(state_dict)

    if config['n_gpu'] > 1:
        model = model.module  # for data parallel

    # prepare model for testing
    device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
    model.to(device)
    model.eval()

    trans_args = {
        'model': model,
        'src_preprocessor': data_loader.src_text_preprocessor,
        'tgt_preprocessor': data_loader.tgt_text_preprocessor,
    }
    translator = config.initialize('translator', module_translator, *trans_args.values())

    # prepare output file
    out_f = (config.test_dir / config['translator']['output']).open('w')

    with torch.no_grad():
        for batch_idx, (src, tgt, lengths, indices) in enumerate(tqdm(data_loader)):
            src, tgt = src.to(device), tgt.to(device)
            pred_batch, _, _ = translator.translate(src, None, lengths, indices)

            for b in range(len(pred_batch)):
                out_f.write(' '.join(pred_batch[b][0]) + '\n')

    out_f.close()
Example #8
    def validate_cdn_techsupport(self, config):
        con = ConfigParser()
        tftp_config = con.load_common_config().tftp_share
        remote_ip = tftp_config.tftp_server_ip
        remote_user = tftp_config.tftp_user
        remote_passwd = tftp_config.tftp_password
        tftp_root_dir = tftp_config.tftp_root_path
        tftp_handle = LinuxUtils(remote_ip, remote_user, remote_passwd)
        tftp_handle.connect()
        ts_path = tftp_root_dir + '/TechSupport/'
        tftp_handle.execute_cmd('mkdir -p ' + ts_path)
        tftp_handle.execute_cmd('chmod 777 ' + ts_path)
        tftp_handle.execute_cmd('tar -xzvf /TFTP_DIR/' + self.ts_file_path +
                                ' -C ' + ts_path)

        platform_type = config.mgmtdetail.platform_series
        if platform_type == 'M5':
            cdn_ts = tftp_handle.execute_cmd(
                'cat ' + ts_path + 'mnt/jffs2/BIOS/bt/BiosTech.log'
                ' | grep "Patched eNIC Name"')
        else:
            cdn_ts = tftp_handle.execute_cmd(
                'cat ' + ts_path + 'var/nuova/BIOS/BiosTech.txt'
                ' | grep "Patched eNIC Name"')
        time.sleep(20)
        tftp_handle.disconnect()
        cdn_from_tsr = re.findall(r'=\s+([^\r\n\'\s]+)', cdn_ts)
        logger.info('CDN info from Tech-support data')
        logger.info(cdn_from_tsr)
        # Getting CDN name from CIMC
        logger.info('Fetching CDN name from CIMC CLI')
        vic_list = config.inventory_detail
        vic_obj = VicLib(self, config)
        for vic in vic_list:
            slot_no = vic.slot_number

            out = vic_obj.cimc_cdn_mac_dict(slot_no)
            cdn_from_cimc = list(out.values())
            logger.info('CDN name from CIMC')
            logger.info(cdn_from_cimc)

            for val in cdn_from_tsr:
                if val not in cdn_from_cimc:
                    logger.info(
                        "CDN names from CIMC do not match the TSR CDN names")
                    return False
        return True
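As a standalone illustration, the extraction regex above behaves like this against a hypothetical grep output (the sample lines are assumptions):

import re

cdn_ts = "Patched eNIC Name = eth0\r\nPatched eNIC Name = eth1\r\n"  # hypothetical
print(re.findall(r'=\s+([^\r\n\'\s]+)', cdn_ts))  # -> ['eth0', 'eth1']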
Example #9
    def ParseDeviceConfigurations(self, folder):
        import os
        default_folder_path = os.getcwd()
        # check if absolute path provided
        if not os.path.isabs(folder):
            folder = default_folder_path + '/' + folder
        try:
            # parses Cisco PIX, IOX, ASA firewall configurations
            parser = ConfigParser()
            parser.Parse(folder)

        except BaseException, e:
            if e.message:
                log.error(e.message)
            else:
                log.error("%s" % e)
Example #10
    def __init__(self, config='~/.telegram-cloner.json'):
        self._config = ConfigParser(config).parse()

        self._tg_client = TelegramClient(self._config['app']['session'],
                                         self._config['telegram']['api_id'],
                                         self._config['telegram']['api_hash'])
        self._tg_client.start()
Example #11
 def test_process_line_with_value_mutator(self):
     mutator = lambda v: "||{}||".format(v)
     valid_line = '{}{}{}'.format(self.CHUNK_A, self.DELIMITER, self.CHUNK_B)
     self.assertEqual(
         ConfigParser._process_line(valid_line, self.DELIMITER, value_mutator=mutator),
         (self.CHUNK_A, mutator(self.CHUNK_B))
     )
Example #12
 def test_parse_valid_config(self):
     expected_config = {
         'a': 'a',
         'b': 'b',
         'c': 'c',
     }
     actual_config = ConfigParser.parse('tests/fixtures/valid_config', '=')
     for k in expected_config:
         self.assertEqual(expected_config.get(k), getattr(actual_config, k))
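A fixture consistent with this test could look like the following; the actual tests/fixtures/valid_config file is not shown, so this is an assumption based on the '=' delimiter and the expected keys:

a=a
b=b
c=c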
Example #13
 def HuuCompare(self, cimc_util_obj, testbed_name, config):
     config_parser = ConfigParser(testbed_name)
     config_parser.load_config(testbed_name)
     mgmt_handle = cimc_util_obj.handle
     output = mgmt_handle.execute_cmd_list('top', 'scope chassis',
                                           'show pci-adapter detail')
     logger.info(output)
     pci_card_details_cimc = {}
     pci_card_details_toc = {}
     pci_card = re.findall(r'product-name:*\s+([^\r\n]+)', output)
     pci_card_version = re.findall(r'fw-version:*\s+([^\r\n]+)', output)
     for (card, version) in zip(pci_card, pci_card_version):
         pci_card_details_cimc[card] = version
     #cmd_str = 'cat /data/home/kgeevane/grit_code/rackauto/tests/TOC_DELNORTE1.xml'
     #toc_out = subprocess.check_output(cmd_str, shell=True, stderr=subprocess.STDOUT).decode(encoding='utf_8', errors='strict')
     toc_out = cimc_util_obj.get_release_note_content(config)
     for pci_card in pci_card_details_cimc.keys():
         card_name_toc = config_parser.config.get('PciAdapterFWVersion',
                                                  pci_card)
         logger.info(card_name_toc)
         regex = r'component\s*name\=\"' + \
             card_name_toc + r'\".*?version\s*\=\"(.*?)\"'
         # Getting version from TOC file.
         pci_card_details_toc[pci_card] = re.search(regex, toc_out).group(1)
     logger.info("######## PCI CARD CIMC##############")
     logger.info(pci_card_details_cimc)
     logger.info("######## PCI CARD TOC ##############")
     logger.info(pci_card_details_toc)
     logger.info("####################################")
     pass_flag = 1
     for pci_card_cimc in pci_card_details_cimc.keys():
         if pci_card_details_cimc[pci_card_cimc] in pci_card_details_toc[
                 pci_card_cimc]:
             logger.info("PCI card " +
                         pci_card_details_cimc[pci_card_cimc] +
                         " version matches with TOC file")
         else:
             logger.error("PCI card " +
                          pci_card_details_cimc[pci_card_cimc] +
                          " version not matches with TOC file")
             pass_flag = 0
     if pass_flag == 1:
         self.passed(
             "Successfully verified all the cards versions with TOC xml")
     else:
         self.failed("Card verification with TOC xml failed")
Example #14
 def parse_buffer(self):
     self.buffer = self.buffer[-3:]                                  # Keep the last three entries (drop buffer[0])
     self.buffer.append(self.charbuf)                                # Append charbuf to buffer
     self.charbuf = ''                                               # Empty charbuf
                                                                      # To execute in another child, just add & at
                                                                      # the end of command in config file
     print('[DEBUG]' + str(self.buffer), file=sys.stderr)            # Print the keycode
     cp = ConfigParser(config_path)                                  # ReParse the Config file as it permits user
                                                                     # to modify it on the fly
     action = cp.get_config_action(self.buffer)                      # Try the current buffer
     if action is not None:                                          # Did anything match ?
         print('[DEBUG]' + str(action), file=sys.stderr)             # Debug information: what matched
         os.system(action)                                           # Execute the command 
         if time.time() - self.lastinputtimestamp > 0.5:             # Authorize a maximum of 0.5s between two keystrokes
             self.buffer = ['', '', '', '']                          # If not, empty buffer
     self.lastinputtimestamp = time.time()
Example #15
def main(config: ConfigParser):
    # setup data_loader instances
    data_loader = config.initialize('test_data_loader', module_data)

    # build model architecture
    model = config.initialize('arch', module_arch)
    print(model)

    # load state dict
    print(f'Loading checkpoint: {config.resume}')
    checkpoint = torch.load(config.resume)
    state_dict = checkpoint['state_dict']

    if config['n_gpu'] > 1:
        model = torch.nn.DataParallel(model, device_ids=config.device, dim=1)
        model.load_state_dict(state_dict)
        model = model.module
    else:
        model.load_state_dict(state_dict)

    # prepare model for testing
    device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
    model.to(device)
    model.eval()

    # create translator by wrapping model
    translator = config.initialize('translator', module_translate, model,
                                   data_loader.text_preprocessor)

    # prepare output file
    out_f = (config.test_dir / config['translator']['output']).open('w')

    with torch.no_grad():
        for batch_idx, (src, tgt, tgt_lang, tgt_style, lengths,
                        indices) in enumerate(tqdm(data_loader)):
            src, tgt = src.to(device), tgt.to(device)
            tgt_lang, tgt_style = tgt_lang.to(device), tgt_style.to(device)

            pred_batch = translator.translate(src, tgt_lang, tgt_style,
                                              lengths, indices)

            for b in range(len(pred_batch)):
                out_f.write(' '.join(pred_batch[b][0]) + '\n')

    out_f.close()
Example #16
 def get_cpu_max_freq(self):
     '''
      Returns the maximum supported CPU DIMM frequency.
     '''
     config = ConfigParser('')
     output = config.load_config('processor_mem_frequency')
     cpu_dict = dict(output.config._sections['cpu_freq'])
     logger.info("CPU dict is")
     logger.info(cpu_dict)
     cli_out = self.mgmt_handle.execute_cmd_list('top', 'scope chassis',
                                                 'show cpu detail')
     model_regex = r'(?:(?:Xeon\(R\)\s*CPU\s+([a-zA-Z0-9\s\-]+)\s+\@)|(?:Xeon\(R\)\s*([a-zA-Z0-9\s\-]+)\s+CPU))'
     model_reg_out = re.search(model_regex, cli_out)
     logger.info('Regular expr output is: ' + str(model_reg_out))
     cpu_model = (str(model_reg_out.group(2))
                  if model_reg_out.group(1) is None else str(
                      model_reg_out.group(1))).replace(' ', '').lower()
     return cpu_dict[cpu_model]
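The lookup implies an INI-style section mapping lowercased, space-stripped model names to frequencies. A hypothetical processor_mem_frequency config consistent with this code (model keys and values are placeholders):

[cpu_freq]
e5-2680v4 = 2400
gold6148 = 2666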
Example #17
    def __init__(self, manager, slides):
        self.event_manager = manager
        self.event_manager.register_listener(self)

        self.screen = pygame.display.get_surface()

        if not slides:
            raise SlideLenError('Incorrect slide length: {0}'.format(len(slides)))

        self.slides = slides

        parser = ConfigParser(constants.MAIN_CFG, constants.CFG_XMLNS)
        dir_name = parser.first_match('intro').attrib
        file_path = path.join(dir_name['dir'],
                              parser.first_match('blank').text)

        self.blank = ResourceManager().getImage(file_path)
        self.alphavalue = MAX_ALPHA
Example #18
    def __init__(self):
        data = {"source": {"operation": "init", "timestamp": time.time()}, "type": "root", "policy_path": []}
        self.policy = ConfigTree(data, sort_key, key_to_string)
        self.reference = ConfigTree(data, sort_key, key_to_string)

        self._parser = ConfigParser()
        self._policy_parser = PolicyParser()
        self._pattern_parser = PatternParser()
        self._configs = [ConfigTree(data, sort_key, key_to_string)]
        self._load_policy()
Example #19
 def remove_techsupport_file(self):
     '''
     Procedure to remove the tech-support file
     Returns:
         True: on success
         False: on failure
     '''
     try:
         logger.info('Deleting tech-support file: ' + self.ts_file_path)
         con = ConfigParser()
         tftp_config = con.load_common_config().tftp_share
         remote_ip = tftp_config.tftp_server_ip
         remote_user = tftp_config.tftp_user
         remote_passwd = tftp_config.tftp_password
         handle = LinuxUtils(remote_ip, remote_user, remote_passwd)
         handle.connect()
         handle.execute_cmd('rm -f ' + '/TFTP_DIR/' + self.ts_file_path)
         handle.disconnect()
         return True
     except:
         dump_error_in_lib()
         return False
Example #20
    def _health_check(self, database):
        # create config parser
        cfg_parser = ConfigParser(database)

        # get all collections
        collections = [c['_id'] for c in database['collections'].find({'dropped': {'$ne': True}})]

        # validate that for each collection, the corresponding chunks form a distribution from 
        # MinKey to MaxKey without gaps or overlaps.
        for namespace in collections:
            print '    ', namespace, 
            chunk_dist = cfg_parser.get_chunk_distribution(namespace)
            ret, msgs = chunk_dist.check()
            if ret: 
                print '  ok'
            else:
                print '  failed\n'
                for msg in msgs:
                    print '       ! %s' % msg
                print
Example #21
    def __init__(self, game_opts):
        State.__init__(self, constants.SCENES['intro'])
        self.game_opts = game_opts
        parser = ConfigParser(constants.MAIN_CFG, constants.CFG_XMLNS)

        # intro slides
        dir_name = parser.first_match('intro').attrib
        slides = [i.text for i in parser.all_matches('slide')]
        slides = [path.join(dir_name['dir'], i) for i in slides]
        slide_num = len(slides)
        self.slides = [ResourceManager().getImage(slides[i]) for i in range(slide_num)]
        self.cutscenes = IntroCutScene(self.slides)

        pygame.mixer.music.set_volume(0.0)
        sound_mixer.play_music(
            constants.FILES['sounds']['menu']['share']['bg'][0])
        if self.game_opts.music:
            pygame.mixer.music.unpause()
        else:
            pygame.mixer.music.pause()
        pygame.mixer.music.set_volume(MAX_VOLUME)
Example #22
def load_config(app):
    global params

    currentdir = os.path.dirname(os.path.realpath(__file__))
    configuration_file = "%s/%s" % (currentdir, 'plugin.cfg')
    logger.info("[WebUI-logs] Plugin configuration file: %s",
                configuration_file)
    try:
        scp = ConfigParser('#', '=')
        z = params.copy()
        z.update(scp.parse_config(configuration_file))
        params = z

        params['logs_type'] = [
            item.strip() for item in params['logs_type'].split(',')
        ]
        if params['logs_hosts']:
            params['logs_hosts'] = [
                item.strip() for item in params['logs_hosts'].split(',')
            ]
        if params['logs_services']:
            params['logs_services'] = [
                item.strip() for item in params['logs_services'].split(',')
            ]

        logger.info("[WebUI-logs] configuration loaded.")
        logger.info("[WebUI-logs] configuration, fetching types: %s",
                    params['logs_type'])
        logger.info("[WebUI-logs] configuration, hosts: %s",
                    params['logs_hosts'])
        logger.info("[WebUI-logs] configuration, services: %s",
                    params['logs_services'])
        return True
    except Exception as exp:
        logger.warning(
            "[WebUI-logs] configuration file (%s) not available: %s",
            configuration_file, str(exp))
        return False
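A hypothetical plugin.cfg consistent with the keys read above, assuming '#' marks comments and '=' separates keys from values (all values are placeholders):

# WebUI logs plugin configuration
logs_type = INFO, WARNING, ERROR
logs_hosts = host1, host2
logs_services = service1, service2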
Example #23
class confMgr(object):
    feature_conf = ConfigParser(CONFIG_FILE_NAME)

    @classmethod
    def getConfigKeyList(cls):
        return confMgr.feature_conf.config_key_list

    @classmethod
    def getConfigMap(cls):
        return confMgr.feature_conf.config_map

    @classmethod
    def setConfigMap(cls, new_dict):
        confMgr.feature_conf.update_dictionary(new_dict)
Example #24
def execute_configuration_file(directory, config_file, db_handler):
    config_file_path = join_paths(directory, config_file)

    if not validate_path(config_file_path):
        log_error("Config file not found")
        return -1

    with open(config_file_path, 'r') as stream:
        yaml_dict = yaml.safe_load(stream)  # safe_load: yaml.load() without a Loader is unsafe and rejected by newer PyYAML
        parser = ConfigParser(yaml_dict)
        db_executor = ConfigExecutor(db_handler, parser)
        db_executor.execute()

    return 0
Example #25
 def run():
     # Init steps
     results = {}
     # Establish the connection with the sut
     cfg_manager = SshNode(host=SUT, name=SUT_NAME, username=SUT_USER)
     # Move to the sut home
     cfg_manager.run_command("cd %s/%s" % (SUT_HOME, FWD_ENGINE))
     # Let's parse the test plan
     parser = ConfigParser(CONFIG_FILE)
     # Run the experiments according to the test plan:
     for config in parser.get_configs():
         # Get the rate class
         rate_to_evaluate = Orchestrator.factory(config.rate)
         # Enforce the configuration
         cfg_manager.run_command("sudo bash %s %s" %
                                 (SUT_CONFIGURATOR, config.experiment))
         # Run the experiments
         values = rate_to_evaluate.run(config)
         # Collect the results
         results['%s-%s' % (config.experiment, config.rate)] = values
     # Finally dump the results on a file and return them
     Orchestrator.dump(results)
     return results
Example #26
 def __init__(self, maec_package, file_name=None, config_directory=None):
     # The input MAEC Package
     self.maec_package = maec_package
     # The output STIX Package (with Indicators)
     self.stix_package = None
     # The input file name
     self.file_name = file_name
     # Parsed configuration structure
     self.config = ConfigParser(config_directory=config_directory)
     # Set the STIX namespace and alias
     stix.utils.set_id_namespace(
         {'https://github.com/MAECProject/maec-to-stix': 'maecToSTIX'})
     # Set the MAEC namespace and alias
     maec.utils.set_id_namespace(
         cybox.utils.Namespace(
             'https://github.com/MAECProject/maec-to-stix', 'maecToSTIX'))
Example #27
 def _required_property_check(self, object, object_properties_dict):
     """Check an Object to make sure it has the specified set of 
        required properties."""
     properties_found = True
     required_properties = object_properties_dict["required"]
     mutually_exclusive_properties = object_properties_dict["mutually_exclusive"]
     pruned_properties = self._prune_object_properties(object.properties.to_dict(), required_properties)
     # Check for the required properties
     if len(ConfigParser.flatten_dict(pruned_properties)) != len(required_properties):
         properties_found = False
     # Check for the mutually exclusive (required) properties
     if mutually_exclusive_properties:
         mutually_exclusive_pruned = self._prune_object_properties(object.properties.to_dict(), mutually_exclusive_properties)
         if len(mutually_exclusive_pruned) != 1:
             properties_found = False
     return properties_found
Example #28
 def run(config):
     results = []
     # We collect run PDR values and we return them
     for iteration in range(0, config.run):
         print "PDR %s-%s Run %s" % (config.type, config.experiment,
                                     iteration)
         # At first we create the experiment factory with the right parameters
         factory = TrexExperimentFactory(
             TREX_SERVER, TX_PORT, RX_PORT,
             "%s/%s.pcap" % (PCAP_HOME, ConfigParser.get_packet(config)),
             SAMPLES, DURATION)
         # Then we instantiate the NDR solver with the above defined parameters
         ndr = NoDropRateSolver(STARTING_TX_RATE, config.line_rate,
                                NDR_WINDOW, LB_DLR, RateType.PPS, factory)
         ndr.solve()
         # Once finished let's collect the results
         results.append(ndr.getSW()[0])
     return results
Example #29
def run():
    config = ConfigParser('config.ini')
    manager = ManagePool(config, ['ttmj', ])
    manager.init_works()
    manager.threads_start()
    while True:
        # Arm a 5-second timer that logs the remaining queue size.
        def log_():
            log.logger.info("URLs remaining in queue: " + str(manager.urls_queue.qsize()))
        t = threading.Timer(5, log_)
        t.start()
        if manager.urls_queue.empty():
            break
    manager.wait_all_complete()
    manager.save_to_db()

    log.logger.info("URL success count: " + str(manager.url_success))
    log.logger.info("URL failure count: " + str(manager.url_failure))
    log.logger.info("MJ success count: " + str(manager.success))
    log.logger.info("MJ failure count: " + str(manager.failure))
Example #30
 def run(config):
     # We create an array in order to store mrr of each run
     results = []
     # We collect run MRR values and we return them
     for iteration in range(0, config.run):
         print "MRR %s-%s Run %s" % (config.type, config.experiment,
                                     iteration)
         # At first we create the experiment factory with the right parameters
         factory = TrexExperimentFactory(
             TREX_SERVER, TX_PORT, RX_PORT,
             "%s/%s.pcap" % (PCAP_HOME, ConfigParser.get_packet(config)),
             SAMPLES, DURATION)
         # Build the experiment passing a given rate
         experiment = factory.build(RATE)
         # Run and collect the output of the experiment
         run = experiment.run().runs[0]
         # Calculate mrr and then store in the array
         mrr = run.getRxTotalPackets() / DURATION
         results.append(mrr)
     return results
Example #31
 def _required_property_check(self, object, object_properties_dict):
     """Check an Object to make sure it has the specified set of 
        required properties."""
     properties_found = True
     required_properties = object_properties_dict["required"]
     mutually_exclusive_properties = object_properties_dict[
         "mutually_exclusive"]
     pruned_properties = self._prune_object_properties(
         object.properties.to_dict(), required_properties)
     # Check for the required properties
     if len(ConfigParser.flatten_dict(pruned_properties)) != len(
             required_properties):
         properties_found = False
     # Check for the mutually exclusive (required) properties
     if mutually_exclusive_properties:
         mutually_exclusive_pruned = self._prune_object_properties(
             object.properties.to_dict(), mutually_exclusive_properties)
         if len(mutually_exclusive_pruned) != 1:
             properties_found = False
     return properties_found
Example #32
def main():
    config = ConfigParser()
    hosts_data = read_json(config.central_hosts_path)
    location1_hosts = read_json(config.location1_hosts_path)
    sorted_location1_hosts = sort_location_hosts(location1_hosts,
                                                 config.location1_from_ip,
                                                 config.location1_to_ip)
    location2_hosts = read_json(config.location2_hosts_path)
    sorted_location2_hosts = sort_location_hosts(location2_hosts,
                                                 config.location2_from_ip,
                                                 config.location2_to_ip)
    locations_dict = prepare_data_by_location_hosts(hosts_data,
                                                    config.location1_from_ip,
                                                    config.location1_to_ip,
                                                    config.location2_from_ip,
                                                    config.location2_to_ip)

    tcp_conversations_data = read_json(config.central_tcp_conversations_path)
    location1_tcp_conversations = read_json(
        config.location1_tcp_conversations_path)
    sorted_location1_tcp_conversations = sort_location_tcp_conversations(
        location1_tcp_conversations, config.location1_from_ip,
        config.location1_to_ip)
    location2_tcp_conversations = read_json(
        config.location2_tcp_conversations_path)
    sorted_location2_tcp_conversations = sort_location_tcp_conversations(
        location2_tcp_conversations, config.location2_from_ip,
        config.location2_to_ip)
    sorted_tcp_conversations_data = prepare_data_ip_numeric(
        tcp_conversations_data, config.location1_from_ip,
        config.location1_to_ip, config.location2_from_ip,
        config.location2_to_ip)
    report_dict = format_report(
        locations_dict, sorted_location1_hosts, sorted_location2_hosts,
        sorted_tcp_conversations_data["Tcp Conversations"],
        sorted_location1_tcp_conversations, sorted_location2_tcp_conversations)
    convert_json_to_html(report_dict)
Example #33
    def __init__(self):
        self.keras_handler = KerasHandler()

        self.filters_config = ConfigParser.parse("GUI/filters.json")

        self.root = Tk()
        self.root.title("TFG Pablo Pastor Martín")
        self.root.resizable(0, 0)
        menubar = Menu(self.root)
        menu1 = Menu(menubar, tearoff=0)
        menu1.add_command(label="Open image", command=self.openfile)
        menubar.add_cascade(label="File...", menu=menu1)
        menu2 = Menu(menubar, tearoff=0)
        menubar.add_cascade(label="View", menu=menu2)
        menu2.add_command(label="Filter", command=self.visualize_filter)
        menu3 = Menu(menubar, tearoff=0)
        menubar.add_cascade(label="Help", menu=menu3)
        menu3.add_command(label="About...", command=self.info)
        self.root.config(menu=menubar)
        self.mainCanvas = MainCanvas(self)

        self.create_combos()

        self.root.mainloop()
Example #34
def main(config: ConfigParser):
    device = torch.device("cuda:0" if config["n_gpu"] > 0 else "cpu")

    train_iters, test_iters, vocab = load_dataset(config, device)
    print("Vocab size:", len(vocab))

    print("prepare Evaluator...")
    evaluator_path = config["evaluator"]["save_path"]
    if evaluator_path is None or not Path(evaluator_path).exists():
        evaluator = Evaluator.create(**config["evaluator"]["args"])
        config["evaluator"]["save_path"] = (config.save_dir /
                                            "evaluator").as_posix()
        evaluator.save(config["evaluator"]["save_path"])
    else:
        evaluator = Evaluator.load(evaluator_path)
    config.save()

    model_F = config.initialize("arch_generator", module_arch,
                                vocab=vocab).to(device)
    model_D = config.initialize("arch_discriminator", module_arch,
                                vocab=vocab).to(device)

    opt_F = config.initialize("generator_optimizer", module_optim)
    opt_D = config.initialize("discriminator_optimizer", module_optim)
    opt_F.set_parameters(model_F.parameters())
    opt_D.set_parameters(model_D.parameters())

    train(config=config,
          vocab=vocab,
          model_F=model_F,
          model_D=model_D,
          opt_F=opt_F,
          opt_D=opt_D,
          train_iters=train_iters,
          test_iters=test_iters,
          evaluator=evaluator)
Example #35
from chunk import Chunk
from chunk_distribution import ChunkDistribution
from config_parser import ConfigParser

from copy import copy, deepcopy
from dateutil import parser

# create a MongoClient on the correct port
mc = MongoClient(port=27017)

# specify the config database ( here I imported all 3 config servers to 1 mongod, hence config[1,2,3] )
config_db = mc['config']

# create config parser object with the config_db as parameter
cfg_parser = ConfigParser(config_db)

# get all collections
collections = [c['_id'] for c in config_db['collections'].find({'dropped': {'$ne': True}})]

# validate that for each collection, the corresponding chunks form a distribution from 
# MinKey to MaxKey without gaps or overlaps.
for namespace in collections:
    print namespace, 
    chunk_dist = cfg_parser.get_chunk_distribution(namespace)
    if chunk_dist.check(verbose=True):
        print '  ok'

# pick first collection (arbitrary, change to specific namespace here)
namespace = "attribute.attribute"
Example #36
    trainer.train()


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="PyTorch Template")
    parser.add_argument(
        "-c",
        "--config",
        default=None,
        type=str,
        help="config file path (default: None)",
    )
    parser.add_argument(
        "-r",
        "--resume",
        default=None,
        type=str,
        help="path to latest checkpoint (default: None)",
    )
    parser.add_argument(
        "-d",
        "--device",
        default=None,
        type=str,
        help="indices of GPUs to enable (default: all)",
    )
    args = parser.parse_args()
    config = ConfigParser.parse(args)

    main(config)
Example #37
 def __init__(self):
     self._sock = None
     self._username = None
     self._game: Optional[Game] = None
     self.config_parser = ConfigParser()
Example #38
import sys
from config_parser import ConfigParser

if __name__ == "__main__":
    if len(sys.argv) != 2:
        print "Usage: python parse_config.py [node_config.cold]\n"
        exit()


    config = ConfigParser(sys.argv[1])
    print config.parse()
Example #39
def clean_jvms(conf):

    addresses = ' '.join([node.host for node in conf.beansDistribution])
    if conf.main.standAlone:
        addresses += ' ' + conf.main.host
    print addresses

    classpath = '-cp ' + conf.classPath + ';' + conf.coldLocation
    command = 'java ' + classpath + ' ' + KILLER_CLASS + ' ' + addresses
    print command
    proc = Popen(command)
    print "Cleaning jvms complete."
    sleep(10)




if __name__ == "__main__":
    if len(sys.argv) != 2:
        print "Usage: python cold.py [node_config.cold]\n"
        exit()

    try:
        parser = ConfigParser(sys.argv[1])
        conf = parser.read_node_config()

        clean_paths()
        clean_jvms(conf)

    except exception.ValidationException, e:
        print 'Configuration error: ' + e.value
Example #40
                            '--debug',
                            help='Debug mode',
                            type=bool,
                            required=False,
                            default=False)
    input_args = arg_parser.parse_args()
    config_filename = input_args.config
    is_debug = input_args.debug
    # setting debug logging level
    if is_debug:
        logging.basicConfig(level=logging.DEBUG)
    else:
        logging.basicConfig(level=logging.INFO)

    # get parameters from config file
    parser = ConfigParser(config_filename)
    database_class_name = parser.db_class_name
    database_host = parser.db_host
    database_port = parser.db_port
    logging.debug('DB class name: ' + database_class_name)
    logging.debug('DB host: ' + database_host)
    logging.debug('DB port: ' + database_port)

    # packet sniffer needs iface and filter info
    logging.info('Program start running in sniffing mode')
    sniff_config = parser.sniff_config
    logging.debug(sniff_config)
    packet_sniffer = PacketSniffer(database_class_name, database_host,
                                   database_port, sniff_config)
    packet_sniffer.start_sniffing()
Example #41
 def test_process_invalid_line_too_many_chunks(self):
     invalid_line = '{}{}{}{}{}'.format(self.CHUNK_A, self.DELIMITER,
                                        self.CHUNK_B, self.DELIMITER,
                                        self.CHUNK_A)
     with self.assertRaises(ConfigParserException):
         ConfigParser._process_line(invalid_line, self.DELIMITER)
Example #42
 def test_parse_invalid_config(self):
     with self.assertRaises(ConfigParserException):
         ConfigParser.parse('tests/fixtures/invalid_config', ':')
Example #43
 def test_process_invalid_line_no_delimiter(self):
     invalid_line = '{}{}'.format(self.CHUNK_A, self.CHUNK_B)
     with self.assertRaises(ConfigParserException):
         ConfigParser._process_line(invalid_line, self.DELIMITER)
Example #44
 def test_process_valid_line(self):
     valid_line = '{}{}{}'.format(self.CHUNK_A, self.DELIMITER,
                                  self.CHUNK_B)
     self.assertEqual(
         ConfigParser._process_line(valid_line, self.DELIMITER),
         (self.CHUNK_A, self.CHUNK_B))
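Taken together, the _process_line tests pin down a small contract: exactly one delimiter per line, an optional value mutator, and a ConfigParserException otherwise. A minimal implementation consistent with them, as an assumption (the real ConfigParser._process_line is not shown):

class ConfigParserException(Exception):
    pass

class ConfigParser(object):
    @staticmethod
    def _process_line(line, delimiter, value_mutator=None):
        # Split on the delimiter and require exactly one key/value pair.
        chunks = line.split(delimiter)
        if len(chunks) != 2:
            raise ConfigParserException('Malformed line: %r' % line)
        key, value = chunks
        # Optionally transform the value before returning the pair.
        if value_mutator is not None:
            value = value_mutator(value)
        return key, value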
Example #45
 def test_process_valid_value_with_strip_chars(self):
     self.assertEqual(
         ConfigParser._process_value('\'{}"\n'.format(self.CHUNK_B)),
         self.CHUNK_B)
Example #46
 def test_process_valid_value(self):
     self.assertEqual(
         ConfigParser._process_value(self.CHUNK_B), self.CHUNK_B)
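Likewise, a minimal _process_value consistent with the two tests above, as an assumption (the real implementation is not shown), would strip surrounding whitespace and quote characters:

class ConfigParser(object):
    @staticmethod
    def _process_value(value):
        # Drop surrounding whitespace, then any leading/trailing quote characters.
        return value.strip().strip('\'"')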
Example #47
class Config(object):
    """
    The LIO configuration API.

    The Config object provides methods to edit, search, validate and update the
    current configuration, and commit that configuration to the live system on
    request.

    It features pattern-matching search for all configuration objects and
    attributes as well as multi-level undo capabilities. In addition, all
    configuration changes are staged before being applied, isolating the
    current configuration from load-time and validation errors.
    """

    policy_dir = "/var/target/policy"

    def __init__(self):
        data = {"source": {"operation": "init", "timestamp": time.time()}, "type": "root", "policy_path": []}
        self.policy = ConfigTree(data, sort_key, key_to_string)
        self.reference = ConfigTree(data, sort_key, key_to_string)

        self._parser = ConfigParser()
        self._policy_parser = PolicyParser()
        self._pattern_parser = PatternParser()
        self._configs = [ConfigTree(data, sort_key, key_to_string)]
        self._load_policy()

    def _load_policy(self):
        """
        Loads all LIO system policy files.
        """
        filepaths = ["%s/%s" % (self.policy_dir, path) for path in os.listdir(self.policy_dir) if path.endswith(".lio")]
        for filepath in filepaths:
            log.debug("Loading policy file %s" % filepath)
            parse_tree = self._policy_parser.parse_file(filepath)
            source = {
                "operation": "load",
                "filepath": filepath,
                "timestamp": time.time(),
                "mtime": os.path.getmtime(filepath),
            }
            self._load_parse_tree(parse_tree, replace=False, source=source, target="policy")

    def _load_parse_tree(
        self, parse_tree, cur_stage=None, replace=False, source=None, target="config", allow_new_attrs=False
    ):
        """
        target can be 'config', 'policy' or 'reference'
        """
        # TODO accept 'defaults' target too
        if source is None:
            source = {}
        if cur_stage is None:
            update_target = True
            if replace:
                data = {"source": source, "policy_path": [], "type": "root"}
                stage = ConfigTree(data, sort_key, key_to_string)
            elif target == "config":
                stage = self.current.get_clone()
                stage.data["source"] = source
            elif target == "policy":
                stage = self.policy.get_clone()
                stage.data["source"] = source
            elif target == "reference":
                stage = self.reference.get_clone()
                stage.data["source"] = source
        else:
            update_target = False
            stage = cur_stage

        loaded = []
        log.debug("Loading parse tree %s" % parse_tree)
        for statement in parse_tree:
            cur = stage
            log.debug("Visiting statement %s" % statement)
            for token in statement:
                token["source"] = source
                log.debug("Visiting token %s" % token)
                if token["type"] == "obj":
                    log.debug("Loading obj token: %s" % token)
                    if target != "policy":
                        token = self.validate_obj(token, cur)
                    old = cur.get(token["key"])
                    cur = cur.cine(token["key"], token)
                    if not old:
                        loaded.append(cur)
                    if target != "policy":
                        self._add_missing_attributes(cur)
                    log.debug("Added object %s" % cur.path)
                elif token["type"] == "attr":
                    log.debug("Loading attr token: %s" % token)
                    if target != "policy":
                        token = self.validate_attr(token, cur, allow_new_attrs)
                    old_nodes = cur.search([(token["key"][0], ".*")])
                    for old_node in old_nodes:
                        log.debug("Deleting old value: %s\nnew is: %s" % (old_node.path, str(token["key"])))
                        deleted = cur.delete([old_node.key])
                        log.debug("Deleted: %s" % str(deleted))
                    cur = cur.cine(token["key"], token)
                    if old_nodes and old_nodes[0].key != cur.key:
                        loaded.append(cur)
                    log.debug("Added attribute %s" % cur.path)
                elif token["type"] == "group":
                    log.debug("Loading group token: %s" % token)
                    if target != "policy":
                        log.debug("cur '%s' token '%s'" % (cur, token))
                        token["policy_path"] = cur.data["policy_path"] + [(token["key"][0],)]
                    old = cur.get(token["key"])
                    cur = cur.cine(token["key"], token)
                    if not old:
                        loaded.append(cur)
                elif token["type"] == "block":
                    log.debug("Loading block token: %s" % token)
                    for statement in token["statements"]:
                        log.debug("_load_parse_tree recursion on block " "statement: %s" % [statement])
                        loaded.extend(
                            self._load_parse_tree(
                                [statement], cur, source=source, target=target, allow_new_attrs=allow_new_attrs
                            )
                        )

        if update_target:
            if target == "config":
                self.current = stage
            elif target == "policy":
                self.policy = stage
            elif target == "reference":
                self.reference = stage

        return loaded

    def _add_missing_attributes(self, obj):
        """
        Given an obj node, add all missing attributes and attribute groups in
        the configuration.
        """
        source = {"operation": "auto", "timestamp": time.time()}
        policy_root = self.policy.get_path(obj.data["policy_path"])
        for policy_node in [node for node in policy_root.nodes if node.data["type"] == "attr"]:
            attr = obj.search([(policy_node.key[0], ".*")])
            if not attr:
                key = (policy_node.key[0], policy_node.data.get("val_dfl"))
                data = {
                    "key": key,
                    "type": "attr",
                    "source": source,
                    "val_dfl": policy_node.data.get("val_dfl"),
                    "val_type": policy_node.data["val_type"],
                    "required": key[1] is None,
                    "policy_path": policy_node.path,
                }
                log.debug("obj.set(%s, %s)" % (str(key), data))
                obj.set(key, data)

        groups = []
        for policy_node in [node for node in policy_root.nodes if node.data["type"] == "group"]:
            group = obj.get((policy_node.key[0],))
            if not group:
                key = (policy_node.key[0],)
                data = {"key": key, "type": "group", "source": source, "policy_path": policy_node.path}
                groups.append(obj.set(key, data))
            else:
                groups.append(group)

        for group in groups:
            policy_root = self.policy.get_path(group.data["policy_path"])
            for policy_node in [node for node in policy_root.nodes if node.data["type"] == "attr"]:
                attr = group.search([(policy_node.key[0], ".*")])
                if not attr:
                    key = (policy_node.key[0], policy_node.data.get("val_dfl"))
                    data = {
                        "key": key,
                        "type": "attr",
                        "source": source,
                        "val_dfl": policy_node.data.get("val_dfl"),
                        "val_type": policy_node.data["val_type"],
                        "required": key[1] is None,
                        "policy_path": policy_node.path,
                    }
                    group.set(key, data)

    def validate_val(self, value, val_type, parent=None):
        valid_value = None
        log.debug("validate_val(%s, %s)" % (value, val_type))
        if value == NO_VALUE:
            return None

        if val_type == "bool":
            if value.lower() in ["yes", "true", "1", "enable"]:
                valid_value = "yes"
            elif value.lower() in ["no", "false", "0", "disable"]:
                valid_value = "no"
        elif val_type == "bytes":
            match = re.match(r"(\d+(\.\d*)?)([kKMGT]?B?$)", value)
            if match:
                qty = str(float(match.group(1)))
                unit = match.group(3).upper()
                if not unit.endswith("B"):
                    unit += "B"
                valid_value = "%s%s" % (qty, unit)
        elif val_type == "int":
            try:
                valid_value = str(int(value))
            except:
                pass
        elif val_type == "ipport":
            (addr, _, port) = value.rpartition(":")
            try:
                str(int(port))
            except:
                pass
            else:
                try:
                    listen_all = int(addr.replace(".", "")) == 0
                except:
                    listen_all = False
                if listen_all:
                    valid_value = "0.0.0.0:%s" % port
                elif addr in list_eth_ips():
                    valid_value = value
        elif val_type == "posint":
            try:
                val = int(value)
            except:
                pass
            else:
                if val > 0:
                    valid_value = value
        elif val_type == "str":
            valid_value = str(value)
            forbidden = "*?[]"
            for char in forbidden:
                if char in valid_value:
                    valid_value = None
                    break
        elif val_type == "erl":
            if value in ["0", "1", "2"]:
                valid_value = value
        elif val_type == "iqn":
            if is_valid_wwn("iqn", value):
                valid_value = value
        elif val_type == "naa":
            if is_valid_wwn("naa", value):
                valid_value = value
        elif val_type == "backend":
            if is_valid_backend(value, parent):
                valid_value = value
        else:
            raise ConfigError("Unknown value type '%s' when validating %s" % (val_type, value))
        log.debug("validate_val(%s) is a valid %s: %s" % (value, val_type, valid_value))
        return valid_value

    def validate_obj(self, token, parent):
        log.debug("validate_obj(%s, %s)" % (token, parent.data))
        policy_search = parent.data["policy_path"] + [(token["key"][0], ".*")]
        policy_nodes = self.policy.search(policy_search)
        valid_token = copy.deepcopy(token)
        expected_val_types = set()

        for policy_node in policy_nodes:
            id_fixed = policy_node.data["id_fixed"]
            id_type = policy_node.data["id_type"]
            if id_fixed is not None:
                expected_val_types.add("'%s'" % id_fixed)
                if id_fixed == token["key"][1]:
                    valid_token["policy_path"] = policy_node.path
                    return valid_token
            else:
                expected_val_types.add(id_type)
                valid_value = self.validate_val(valid_token["key"][1], id_type)
                if valid_value is not None:
                    valid_token["key"] = (valid_token["key"][0], valid_value)
                    valid_token["policy_path"] = policy_node.path
                    return valid_token

        if not policy_nodes:
            obj_type = ("%s %s" % (parent.path_str, token["key"][0])).strip()
            raise ConfigError("Unknown object type: %s" % obj_type)
        else:
            raise ConfigError(
                "Invalid %s identifier '%s': expected type %s"
                % (token["key"][0], token["key"][1], ", ".join(expected_val_types))
            )

    def validate_attr(self, token, parent, allow_new_attr=False):
        log.debug("validate_attr(%s, %s)" % (token, parent.data))
        if token["key"][1] is None:
            return token

        policy_search = parent.data["policy_path"] + [(token["key"][0], ".*")]
        policy_nodes = self.policy.search(policy_search)
        valid_token = copy.deepcopy(token)
        expected_val_types = set()
        for policy_node in policy_nodes:
            ref_path = policy_node.data["ref_path"]
            valid_token["required"] = policy_node.data["required"]
            valid_token["comment"] = policy_node.data["comment"]
            valid_token["val_dfl"] = policy_node.data.get("val_dfl")
            valid_token["val_type"] = policy_node.data["val_type"]
            if ref_path is not None:
                root = parent
                if ref_path.startswith("-"):
                    (upno, _, down) = ref_path[1:].partition(" ")
                    for i in range(int(upno) - 1):
                        root = root.parent
                else:
                    while not root.is_root:
                        root = root.parent

                search_path = [(down, token["key"][1])]
                nodes = root.search(search_path)

                if len(nodes) == 1:
                    valid_token["ref_path"] = nodes[0].path_str
                    return valid_token
                elif len(nodes) == 0:
                    raise ConfigError("Invalid reference for attribute %s: %s" % (token["key"][0], search_path))
                else:
                    raise ConfigError("Unexpected reference error, got: %s" % nodes)

                return valid_token
            else:
                expected_val_types.add(policy_node.data["val_type"])
                if valid_token["key"][1] == NO_VALUE:
                    valid_value = NO_VALUE
                else:
                    valid_value = self.validate_val(valid_token["key"][1], policy_node.data["val_type"], parent=parent)
                if valid_value is not None:
                    valid_token["key"] = (valid_token["key"][0], valid_value)
                    return valid_token

        if not policy_nodes:
            if allow_new_attr:
                valid_token["required"] = False
                valid_token["comment"] = "Unknown"
                valid_token["val_dfl"] = valid_token["key"][1]
                valid_token["val_type"] = "raw"
                valid_token["ref_path"] = None
                return valid_token
            else:
                attr_name = ("%s %s" % (parent.path_str, token["key"][0])).strip()
                raise ConfigError("Unknown attribute: %s" % attr_name)
        else:
            raise ConfigError(
                "Invalid %s value '%s': expected type %s"
                % (token["key"][0], token["key"][1], ", ".join(expected_val_types))
            )

    @property
    def current(self):
        return self._configs[-1]

    @current.setter
    def current(self, config_tree):
        self._configs.append(config_tree)

    def undo(self):
        """
        Restores the previous state of the configuration, before the last set,
        load, delete, update or clear operation. If there is nothing to undo, a
        ConfigError exception will be raised.
        """
        if len(self._configs) < 2:
            raise ConfigError("Nothing to undo")
        else:
            self._configs.pop()

    def set(self, configuration):
        """
        Evaluates the configuration (a string in LIO configuration format) and
        sets the relevant objects, attributes and attribute groups.

        Existing attributes and objects will be updated if needed and new ones
        will be added.

        The list of created configuration nodes will be returned.

        If an error occurs, the operation will be aborted, leaving the current
        configuration intact.
        """
        parse_tree = self._parser.parse_string(configuration)
        source = {"operation": "set", "data": configuration, "timestamp": time.time()}
        return self._load_parse_tree(parse_tree, source=source)

    def delete(self, pattern, node_filter=lambda x: x):
        """
        Deletes all configuration objects and attributes whose paths match the
        pattern, along with their children.

        The pattern is a single LIO configuration statement without any block,
        where object identifiers, attributes names, attribute values and
        attribute groups are regular expressions patterns. Object types have to
        use their exact string representation to match.

        node_filter is a function applied to each node before returning it:
            node_filter(node_in) -> node_out | None (aka filtered out)

        Returns a list of all deleted nodes.

        If an error occurs, the operation will be aborted, leaving the current
        configuration intact.
        """
        path = [token for token in self._pattern_parser.parse_string(pattern)]
        log.debug("delete(%s)" % pattern)
        source = {"operation": "delete", "pattern": pattern, "timestamp": time.time()}
        stage = self.current.get_clone()
        stage.data["source"] = source
        deleted = []
        for node in stage.search(path, node_filter):
            log.debug("delete() found node %s" % node)
            deleted.append(stage.delete(node.path))
        self.current = stage
        return deleted

    def load(self, filepath, allow_new_attrs=False):
        """
        Loads an LIO configuration file and replace the current configuration
        with it.

        All existing objects and attributes will be deleted, and new ones will
        be added.

        If an error occurs, the operation will be aborted, leaving the current
        configuration intact.
        """
        parse_tree = self._parser.parse_file(filepath)
        source = {
            "operation": "load",
            "filepath": filepath,
            "timestamp": time.time(),
            "mtime": os.path.getmtime(filepath),
        }
        self._load_parse_tree(parse_tree, replace=True, source=source, allow_new_attrs=allow_new_attrs)

    def load_live(self):
        """
        Loads the live-running configuration.
        """
        from config_live import dump_live

        live = dump_live()
        parse_tree = self._parser.parse_string(live)
        source = {"operation": "resync", "timestamp": time.time()}
        self._load_parse_tree(parse_tree, replace=True, source=source, allow_new_attrs=True)
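
    # Usage sketch: replace the current tree with the live-running
    # configuration; new attributes are allowed (allow_new_attrs=True):
    #
    #   config.load_live()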

    def update(self, filepath):
        """
        Updates the current configuration with the contents of an LIO
        configuration file.

        Existing attributes and objects will be updated if needed and new ones
        will be added.

        If an error occurs, the operation will be aborted, leaving the current
        configuration intact.
        """
        parse_tree = self._parser.parse_file(filepath)
        source = {
            "operation": "update",
            "filepath": filepath,
            "timestamp": time.time(),
            "mtime": os.path.getmtime(filepath),
        }
        self._load_parse_tree(parse_tree, source=source)
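
    # Usage sketch (hedged; the filepath is hypothetical). Unlike load(),
    # update() merges the file into the current configuration instead of
    # replacing it:
    #
    #   config.update("/etc/target/overrides.lio")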

    def clear(self):
        """
        Clears the current configuration.

        This removes all current objects and attributes from the configuration.
        """
        source = {"operation": "clear", "timestamp": time.time()}
        self.current = ConfigTree({"source": source}, sort_key, key_to_string)

    def search(self, search_statement, node_filter=lambda x: x):
        """
        Returns a list of nodes matching the search_statement, relative to the
        current node, or an empty list if no match was found.

        The search_statement is a single LIO configuration statement without
        any block, where object identifiers, attribute names, attribute values
        and attribute groups are regular expression patterns. Object types
        have to match their exact string representation.

        node_filter is a function applied to each node before returning it:
            node_filter(node_in) -> node_out | None (aka filtered out)
        """
        path = list(self._pattern_parser.parse_string(search_statement))
        return self.current.search(path, node_filter)
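
    # Usage sketch (hedged; the statement is illustrative): iterate over every
    # fileio disk currently in the configuration.
    #
    #   for node in config.search("storage fileio disk .*"):
    #       print(node.path_str)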

    def dump(self, search_statement=None, node_filter=lambda x: x):
        """
        Returns a LIO configuration file format dump of the nodes matching
        the search_statement, or of all nodes if search_statement is None.

        The search_statement is a single LIO configuration statement without
        any block, where object identifiers, attribute names, attribute values
        and attribute groups are regular expression patterns. Object types
        have to match their exact string representation.

        node_filter is a function applied to each node before dumping it:
            node_filter(node_in) -> node_out | None (aka filtered out)
        """
        # FIXME: Breaks with filter_only_missing
        if not search_statement:
            root_nodes = [self.current]
        else:
            root_nodes = self.search(search_statement, node_filter)

        if root_nodes:
            parts = []
            for root_node_in in root_nodes:
                root_node = node_filter(root_node_in)
                if root_node is None:
                    continue  # filtered out: skip this node, keep dumping the rest
                dump = ""
                if root_node.key_str:
                    dump = "%s " % root_node.key_str
                nodes = root_node.nodes
                if root_node.is_root or len(nodes) == 1:
                    for node in nodes:
                        section = self.dump(node.path_str, node_filter)
                        if section:
                            dump += section
                elif len(nodes) > 1:
                    dump += "{\n"
                    for node in nodes:
                        section = self.dump(node.path_str, node_filter)
                        if section is not None:
                            lines = section.splitlines()
                        else:
                            lines = []
                        dump += "\n".join("    %s" % line for line in lines if line)
                        dump += "\n"
                    dump += "}\n"
                parts.append(dump)
            dump = "\n".join(parts)
            if dump.strip():
                return dump
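
    # Usage sketch (hedged; the statement is illustrative): dump() returns a
    # string in LIO configuration file format, or None when nothing matches.
    #
    #   text = config.dump("storage fileio disk vm1")
    #   if text is not None:
    #       print(text)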

    def save(self, filepath, pattern=None):
        """
        Saves the current configuration to filepath, using LIO configuration
        file format. If pattern is not None, only objects and attributes
        starting at the matched node and hanging under it will be saved.

        For convenience, the saved configuration will also be returned as a
        string.

        The pattern is a whitespace-separated string of regular expressions,
        each of which will be matched against configuration objects and
        attributes. The pattern must be unambiguous: if it matches either zero
        or more than one configuration node, a ConfigError exception will be
        raised.
        """
        dump = self.dump(pattern, filter_no_missing)
        if dump is None:
            dump = ""
        with open(filepath, "w") as f:
            f.write(dump)
        return dump
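
    # Usage sketch (hedged; the filepath is hypothetical): save the whole
    # configuration and keep the returned string for logging.
    #
    #   saved = config.save("/tmp/backup.lio")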

    def verify(self):
        """
        Validates the configuration for the following points:
            - Portal IP Addresses exist
            - Devices and file paths exist
            - Files for fileio exist
            - No required attributes are missing
            - References are correct
        Returns a dictionary of validation_test: [errors]
        """
        # TODO: the validation checks listed above are not implemented yet.
        return {}

    def apply(self, brute_force=True):
        """
        Applies the configuration to the live system:
            - Remove objects absent from the configuration and objects in the
              configuration with different required attributes
            - Create new storage objects
            - Create new fabric objects
            - Update relevant storage objects
            - Update relevant fabric objects
        """
        from config_live import apply_create_obj, apply_delete_obj, clear_configfs

        if brute_force:
            yield "[clear] delete all live objects"
            clear_configfs()
            for obj in self.current.walk(get_filter_on_type(["obj"])):
                yield ("[create] %s" % obj.path_str)
                apply_create_obj(obj)
        else:
            # TODO for minor_obj, update instead of create/delete
            diff = self.diff_live()
            delete_list = diff["removed"] + diff["major_obj"] + diff["minor_obj"]
            delete_list.reverse()
            for obj in delete_list:
                yield "[delete] %s" % obj.path_str
                apply_delete_obj(obj)

            for obj in diff["created"] + diff["major_obj"] + diff["minor_obj"]:
                yield "[create] %s" % obj.path_str
                apply_create_obj(obj)
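
    # Usage sketch: apply() is a generator, so the caller drives the changes
    # and can report progress as each step message is yielded:
    #
    #   for step in config.apply(brute_force=False):
    #       print(step)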

    def diff_live(self):
        """
        Returns a diff between the current configuration and the live
        configuration as a reference.
        """
        from config_live import dump_live

        parse_tree = self._parser.parse_string(dump_live())
        source = {"operation": "load", "timestamp": time.time()}
        self._load_parse_tree(parse_tree, replace=True, source=source, target="reference", allow_new_attrs=True)
        return self.diff()

    def diff(self):
        """
        Computes differences between a valid current configuration and a
        previously loaded valid reference configuration.

        Returns a dict of:
            - 'removed': list of removed objects
            - 'major': list of changed required attributes
            - 'major_obj': list of obj with major changes
            - 'minor': list of changed non-required attributes
            - 'minor_obj': list of obj with minor changes
            - 'created': list of new objects in the current configuration
        """
        # FIXME  data['required'] check should be enough without NO_VALUE check
        # FIXME Can't we just pass the reference config instead of having to preload it?
        diffs = {}
        keys = ("removed", "major", "major_obj", "minor", "minor_obj", "created")
        for key in keys:
            diffs[key] = []

        for obj in self.current.walk(get_filter_on_type(["obj"])):
            if not self.reference.get_path(obj.path):
                diffs["created"].append(obj)

        for obj in self.reference.walk(get_filter_on_type(["obj"])):
            if not self.current.get_path(obj.path):
                diffs["removed"].append(obj)

        for obj in self.current.walk(get_filter_on_type(["obj"])):
            if self.reference.get_path(obj.path):
                for node in obj.nodes:
                    if node.data["type"] == "attr" and (node.data["required"] or node.key[1] == NO_VALUE):
                        if not self.reference.get_path(node.path):
                            diffs["major"].append(node)
                            diffs["major_obj"].append(node.parent)

        for obj in self.current.walk(get_filter_on_type(["obj"])):
            if self.reference.get_path(obj.path):
                for node in obj.nodes:
                    if node.data["type"] == "attr" and not node.data["required"] and node.key[1] != NO_VALUE:
                        if not self.reference.get_path(node.path):
                            diffs["minor"].append(node)
                            if node.parent not in diffs["minor_obj"] and node.parent not in diffs["major_obj"]:
                                diffs["minor_obj"].append(node.parent)
                    elif node.data["type"] == "group":
                        for attr in node.nodes:
                            if attr.data["type"] == "attr" and not attr.data["required"] and attr.key[1] != NO_VALUE:
                                if not self.reference.get_path(attr.path):
                                    diffs["minor"].append(attr)
                                    if node.parent not in diffs["minor_obj"] and node.parent not in diffs["major_obj"]:
                                        diffs["minor_obj"].append(node.parent)
        return diffs
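
    # Usage sketch: diff_live() loads the running system as the reference and
    # returns the dict structure documented above.
    #
    #   changes = config.diff_live()
    #   for obj in changes["created"]:
    #       print("pending create: %s" % obj.path_str)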