# Example 1
    def build_sources(self):
        """Initialize per-source bookkeeping tables and shuffle the source order.

        Reads the configured source contexts, builds one scanner and one
        persistent claims store per source, and records everything keyed by
        source context.
        """
        # Per-source lookup tables, keyed by source_context.
        self.sources = {}             # source_context -> hostname:source_path
        self.paths = {}               # source_context -> local path
        self.inventory = {}           # source_context -> source:inventory()
        self.backups = {}             # local backups (intended or actual): {filename: size}
        self.scanners = {}            # my local storage (actual)
        self.claims = {}              # source_context -> {filename: time()}
        self.random_source_list = []  # shuffled list of source contexts
        self.datagrams = {}           # internal storage of connections
        self.metadata = {}            # internal storage of server metadata

        lazy_write = get_interval(self.config, "LAZY WRITE", (self.context, ))
        source_contexts = self.config.get_contexts_for_key("source")
        self.prune_sources(source_contexts)

        for source_context, source in source_contexts.items():
            path = f"{self.path}/{source_context}"
            self.sources[source_context] = source
            self.paths[source_context] = path
            self.scanners[source_context] = scanner.ScannerLite(
                source_context, path,
                pd_path=self.path, loglevel=logging.INFO,
                name=f"{self.context}:{source_context}")
            # Claims persist across restarts in a per-source bz2 file.
            claims_path = f"{self.path}/claims-{self.context}:{source_context}.bz2"
            self.claims[source_context] = PersistentDict(
                claims_path, lazy_write=lazy_write)
            self.backups[source_context] = {}
            self.random_source_list.append(source_context)
        random.shuffle(self.random_source_list)
    def __init__(self, context):
        """Create a clientlet for *context*.

        Resolves the backup path from config (which must already exist),
        builds the per-source state via build_sources(), and sets up the
        persistent claims cache and counters.
        """
        super().__init__()
        self.context = context
        self.config = config.Config.instance()
        self.logger = logging.getLogger(logger_str(__class__) + " " + context)
        self.logger.info(f"Creating clientlet {self.context}")

        self.path = config.path_for(self.config.get(self.context, "backup"))
        assert os.path.exists(self.path), f"{self.path} does not exist!"

        # ALL source contexts (we care a lot)
        self.sources = {}
        self.scanners = {}
        self.random_source_list = []
        self.build_sources()

        lazy_write = utils.str_to_duration(
            self.config.get(context, "LAZY WRITE", 5))
        # TODO: my cache of claims should expire in rescan/2
        self.rescan = self.get_interval("rescan") // 2
        # BUG FIX: the configured lazy_write was computed above but a
        # hard-coded 5 was passed; honor the "LAZY WRITE" config value
        # (its default is 5, so default behavior is unchanged).
        # NOTE(review): this overwrites the per-source self.claims dict
        # populated by build_sources() — confirm that is intended.
        self.claims = PersistentDict(f"/tmp/cb.c{context}.json.bz2",
                                     lazy_write=lazy_write,
                                     expiry=self.rescan)
        self.drops = 0  # count the number of times I drop a file
        self.stats = stats.Stats()

        self.update_allocation()
        self.bailing = False
        self.datagrams = {}
# Example 3
    def initialiseCachedParams(self):
        '''
        Initialises default parameters from JSON file-backed store, or creates them
        if not existing in file from coded defaults
        '''
        self.param_cache_file = self.default_config_path + "/lpdFemGui_config.json"

        # 'c' flag: open the backing file, creating it if absent.
        self.cached_params = PersistentDict(self.param_cache_file, 'c', format='json')

        default_params = { 'connectionTimeout' : 5.0,
                          'femAddr'           : '192.168.2.2',
                          'femPort'           : 6969,
                          'readoutParamFile'  : self.default_config_path + '/superModuleReadout.xml',
                          'cmdSequenceFile'   : self.default_config_path + '/Command_LongExposure_V2.xml',
                          'setupParamFile'    : self.default_config_path + '/Setup_LowPower.xml',
                          'dataFilePath'      : '/tmp',
                          'analysisPdfPath'   : '/tmp',
                          'hvBiasVolts'       : 50.0,
                          'numTrains'         : 8,
                          'externalTrigger'   : True,
                          'triggerDelay'      : 0,
                          'pwrAutoUpdate'     : False,
                          'pwrUpdateInterval' : 0,
                          'runNumber'         : 0,
                          'fileWriteEnable'   : True,
                          'liveViewEnable'    : False,
                          'liveViewDivisor'   : 1,
                          'liveViewOffset'    : 0,
                          'evrMcastGroup'     : '239.255.16.17',
                          'evrMcastPort'      : 10151,
                          'evrMcastInterface' : '172.21.22.69',
                          'evrRecordEnable'   : True,
                          'femAsicGainOverride' : 8,
                          'femAsicPixelFeedbackOverride' : 0,
                          'asicModuleType'      : 0,
                          'multiRunEnable'    : True,
                          'multiRunNumRuns'   : 123,
                          'receiveDataInternally': False,   # Data received either internally or from ODIN

                        'odinFrCtrlChannel'  : 'tcp://127.0.0.1:5000',
                        'odinFpCtrlChannel'  : 'tcp://127.0.0.1:5004',
                        # TODO: Plan file structure for odinDataConfigFile
                        'odinDataConfigFile' : self.default_config_path + '/odin_data_lpd_config.json'
                         }

        # List of parameter names that don't need to force a system reconfigure
        # BUG FIX: 'hvBiasBolts' and 'multiNumRuns' were typos that matched no
        # key in default_params, so 'hvBiasVolts' and 'multiRunNumRuns' were
        # wrongly treated as volatile; corrected to the real parameter names.
        self.non_volatile_params = ('fileWriteEnable', 'liveViewEnable', 'liveViewDivisor', 'liveViewOffset',
                                  'pwrAutoUpdate', 'pwrUpdateInterval', 'dataFilePath', 'hvBiasVolts',
                                  'multiRunEnable', 'multiRunNumRuns')

        # Load default parameters into cache if not already existing
        for param in default_params:
            if param not in self.cached_params:
                self.cached_params[param] = default_params[param]

        # Sync cached parameters back to file
        self.cached_params.sync()
# Example 4
    def __init__(self, context):
        """Create a server-side handler for *context*.

        Resolves the source path and scan interval from config, and sets up
        a persistent per-file client-claims store.
        """
        super().__init__()
        self.context = context

        logger_str = f"{utils.logger_str(__class__)} {context}"
        self.logger = logging.getLogger(logger_str)
        # self.logger.setLevel(logging.INFO)

        self.config = config.Config.instance()
        self.copies = int(self.config.get(self.context, "copies", 2))
        self.path = config.path_for(self.config.get(self.context, "source"))
        self.scanner = scanner.ScannerLite(self.context, self.path)
        self.rescan = utils.get_interval(self.config, "rescan", self.context)

        lazy_write = self.config.get(context, "LAZY WRITE", 5)
        lazy_write = utils.str_to_duration(lazy_write)
        # self.clients: { filename : { client: expiry_time, } }
        clients_state = f"/tmp/cb.{context}-clients.json.bz2"
        # BUG FIX: the configured lazy_write was computed above but a
        # hard-coded 5 was passed; honor the "LAZY WRITE" config value
        # (its default is 5, so default behavior is unchanged).
        self.clients = PersistentDict(clients_state, lazy_write=lazy_write)
        self.stats = stats.Stats()
        self.handling = False
def test_read_from_file(tmpfile, test_data):
    """A PersistentDict constructed over an existing file must load its contents."""
    payload = pickle.dumps(test_data)
    with open(tmpfile, 'wb') as out:
        out.write(payload)
    loaded = PersistentDict(location=tmpfile)
    assert loaded == test_data
def test_clear(tmpfile):
    """Clear must also clear storage file"""
    persisted = PersistentDict({'a': 42, 'b': 1464}, location=tmpfile)
    persisted.clear()
    # Both the in-memory mapping and its backing file must be gone.
    assert persisted == {}
    assert not os.path.exists(tmpfile)