Example #1
    def __init__(self, config, datasource_name, dataset_expr,
                 dataset_nick=None, dataset_proc=None):
        DataProvider.__init__(self, config, datasource_name, dataset_expr,
                              dataset_nick, dataset_proc)
        self._common_prefix = max(DataProvider.enum_value_list) + 1
        self._common_metadata = max(DataProvider.enum_value_list) + 2

        self._entry_handler_info = {
            'events': (DataProvider.NEntries, int, 'block entry counter'),
            'id': (None, None, 'dataset ID'),  # legacy key - skip
            'metadata': (DataProvider.Metadata, parse_json, 'metadata description'),
            'metadata common': (self._common_metadata, parse_json, 'common metadata'),
            'nickname': (DataProvider.Nickname, str, 'dataset nickname'),
            'prefix': (self._common_prefix, str, 'common prefix'),
            'se list': (DataProvider.Locations, lambda value: parse_list(value, ','), 'block location'),
        }

        (path, self._forced_prefix, self._filter) = split_opt(dataset_expr, '@%')
        self._filename = config.resolve_path(path, True, 'Error resolving dataset file: %s' % path)
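The `_entry_handler_info` table above maps each key that can appear in a dataset file to a target enum, a value parser, and a description. A minimal sketch of how such a table can drive line-by-line parsing; the `parse_entry` helper and the sample calls are hypothetical, not grid-control API:

# Hypothetical table-driven dispatch; 'handler_info' has the same shape as
# self._entry_handler_info: key -> (target enum, parser, description)
def parse_entry(handler_info, key, value):
    (target, parser, description) = handler_info[key]
    if target is None:  # legacy keys such as 'id' are skipped
        return None
    return (target, parser(value))

# parse_entry(handler_info, 'events', '42') -> (DataProvider.NEntries, 42)
# parse_entry(handler_info, 'id', '7')      -> None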
Example #2
    def __init__(self, config, datasetExpr, datasetNick, datasetID,
                 providerList):
        DataProvider.__init__(self, config, datasetExpr, datasetNick,
                              datasetID)
        self._providerList = providerList
        for provider in self._providerList:
            provider.setPassthrough()
Example #3
    def __init__(self, config, datasetExpr, datasetNick, datasetID=0):
        DataProvider.__init__(self, config, datasetExpr, datasetNick, datasetID)

        (self._path, self._events, selist) = utils.optSplit(datasetExpr, "|@")
        self._selist = utils.parseList(selist, delimeter=",", onEmpty=None)
        if not (self._path and self._events):
            raise ConfigError("Invalid dataset expression!\nCorrect: /local/path/to/file|events[@SE1,SE2]")
Example #4
	def __init__(self, config, datasetExpr, datasetNick = None, datasetID = 0):
		DataProvider.__init__(self, config, datasetExpr, datasetNick, datasetID)

		(self._path, self._events, selist) = utils.optSplit(datasetExpr, '|@')
		self._selist = parseList(selist, ',') or None
		if not (self._path and self._events):
			raise ConfigError('Invalid dataset expression!\nCorrect: /local/path/to/file|events[@SE1,SE2]')
Example #5
	def __init__(self, config, datasource_name, dataset_expr,
			dataset_nick, dataset_proc, scanner_list_default):
		DataProvider.__init__(self, config, datasource_name, dataset_expr, dataset_nick, dataset_proc)
		# Configure scanners
		scanner_config = config.change_view(default_on_change=TriggerResync(['datasets', 'parameters']))
		self._interactive_assignment = config.is_interactive('dataset name assignment', True)

		def _create_scanner(scanner_name):
			return InfoScanner.create_instance(scanner_name, scanner_config, datasource_name)
		scanner_list = scanner_config.get_list('scanner', scanner_list_default) + ['NullScanner']
		self._scanner_list = lmap(_create_scanner, scanner_list)

		# Configure dataset / block naming and selection
		def _setup(prefix):
			selected_hash_list = scanner_config.get_list(join_config_locations(prefix, 'key select'), [])
			name = scanner_config.get(join_config_locations(prefix, 'name pattern'), '')
			return (selected_hash_list, name)
		(self._selected_hash_list_dataset, self._dataset_pattern) = _setup('dataset')
		(self._selected_hash_list_block, self._block_pattern) = _setup('block')

		# Configure hash input for separation of files into datasets / blocks
		def _get_active_hash_input(prefix, guard_entry_idx):
			hash_input_list_user = scanner_config.get_list(join_config_locations(prefix, 'hash keys'), [])
			hash_input_list_guard = scanner_config.get_list(join_config_locations(prefix, 'guard override'),
				lchain(imap(lambda scanner: scanner.get_guard_keysets()[guard_entry_idx], self._scanner_list)))
			return hash_input_list_user + hash_input_list_guard
		self._hash_input_set_dataset = _get_active_hash_input('dataset', 0)
		self._hash_input_set_block = _get_active_hash_input('block', 1)
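The hash inputs collected here determine which metadata keys separate scanned files into datasets and blocks: files whose values for the selected keys agree land in the same group. A minimal sketch of that grouping idea; the helper is hypothetical, and in grid-control the scanners supply the metadata:

import hashlib

def group_by_hash_keys(file_metadata_list, hash_keys):
    # hash the selected metadata values; equal hashes -> same dataset/block
    groups = {}
    for metadata in file_metadata_list:
        hash_input = repr([metadata.get(key) for key in hash_keys])
        group_hash = hashlib.md5(hash_input.encode()).hexdigest()
        groups.setdefault(group_hash, []).append(metadata)
    return groups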
Example #6
	def __init__(self, config, datasource_name, dataset_expr, dataset_nick, provider_list):
		for provider in provider_list:
			provider.disable_stream_singletons()
		DataProvider.__init__(self, config, datasource_name, dataset_expr, dataset_nick)
		self._stats = DataProcessor.create_instance('SimpleStatsDataProcessor', config,
			'dataset', self._log, 'Summary: Running over ')
		self._provider_list = provider_list
Example #7
	def __init__(self, config, datasource_name, dataset_expr, dataset_nick=None, dataset_proc=None):
		DataProvider.__init__(self, config, datasource_name, dataset_expr, dataset_nick, dataset_proc)

		(self._path, self._events, selist) = split_opt(dataset_expr, '|@')
		self._selist = parse_list(selist, ',') or None
		if not (self._path and self._events):
			raise ConfigError('Invalid dataset expression!\nCorrect: /local/path/to/file|events[@SE1,SE2]')
Example #8
	def __init__(self, config, datasetExpr, datasetNick = None):
		DataProvider.__init__(self, config, datasetExpr, datasetNick)

		(self._path, self._events, selist) = utils.optSplit(datasetExpr, '|@')
		self._selist = parseList(selist, ',') or None
		if not (self._path and self._events):
			raise ConfigError('Invalid dataset expression!\nCorrect: /local/path/to/file|events[@SE1,SE2]')
Example #9
    def __init__(self, config, datasetExpr, datasetNick, sList):
        DataProvider.__init__(self, config, datasetExpr, datasetNick)
        (self._ds_select, self._ds_name, self._ds_keys_user,
         self._ds_keys_guard) = self._setup(config, 'dataset')
        (self._b_select, self._b_name, self._b_keys_user,
         self._b_keys_guard) = self._setup(config, 'block')
        scanList = config.getList('scanner', sList) + ['NullScanner']
        self._scanner = lmap(
            lambda cls: InfoScanner.createInstance(cls, config), scanList)
Example #10
	def __init__(self, config, datasetExpr, datasetNick = None, datasetID = 0):
		DataProvider.__init__(self, config, '', datasetNick, datasetID)
		def DSB(cFun, n, *args, **kargs):
			return (cFun('dataset %s' % n, *args, **kargs), cFun('block %s' % n, *args, **kargs))
		(self.nameDS, self.nameB) = DSB(config.get, 'name pattern', '')
		(self.kUserDS, self.kUserB) = DSB(config.getList, 'hash keys', [])
		(self.kGuardDS, self.kGuardB) = DSB(config.getList, 'guard override', [])
		self.kSelectDS = config.getList('dataset key select', [])
		scanList = config.getList('scanner', datasetExpr) + ['NullScanner']
		self.scanner = lmap(lambda cls: InfoScanner.createInstance(cls, config), scanList)
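`DSB` just evaluates the same config accessor twice, once with the `dataset` prefix and once with the `block` prefix. For illustration, with a plain dict and hypothetical sample values standing in for the real config object:

def DSB(cFun, n, *args, **kargs):
    return (cFun('dataset %s' % n, *args, **kargs), cFun('block %s' % n, *args, **kargs))

config_data = {'dataset name pattern': '/PRIVATE/@NICK@', 'block name pattern': ''}  # assumed sample
get = lambda key, default: config_data.get(key, default)
print(DSB(get, 'name pattern', ''))  # -> ('/PRIVATE/@NICK@', '')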
Example #11
	def __init__(self, config, datasource_name, dataset_expr, dataset_nick=None, dataset_proc=None):
		DataProvider.__init__(self, config, datasource_name, dataset_expr, dataset_nick, dataset_proc)

		ds_config = config.change_view(view_class='SimpleConfigView',
			set_sections=['datasource %s' % dataset_expr])
		self._block = self._read_block(ds_config, dataset_expr, dataset_nick)

		def _on_change(config, old_obj, cur_obj, cur_entry, obj2str):
			self._log.critical('Dataset %r changed', dataset_expr)
			return TriggerResync(['datasets', 'parameters'])(config, old_obj, cur_obj, cur_entry, obj2str)
		ds_config.get('dataset hash', self._get_dataset_hash(), persistent=True, on_change=_on_change)
Example #12
	def __init__(self, config, datasetExpr, datasetNick = None, datasetID = 0):
		DataProvider.__init__(self, config, datasetExpr, datasetNick, datasetID)

		config = config.changeView(viewClass = 'SimpleConfigView', setSections = ['datasource %s' % datasetExpr])
		self._block = self._readBlockFromConfig(config, datasetExpr, datasetNick, datasetID)

		dataset_hash_new = md5_hex(repr(self._block))
		dataset_hash_old = config.get('dataset hash', dataset_hash_new, persistent = True)
		self._request_resync = dataset_hash_new != dataset_hash_old
		if self._request_resync:
			self._log.critical('Dataset %r changed', datasetExpr)
			config.setState(True, 'resync', detail = 'dataset')
			config.setState(True, 'resync', detail = 'parameters')
			config.set('dataset hash', dataset_hash_new)
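The pattern above persists a hash of the block description on first run and triggers a resync whenever a later run computes a different hash. A standalone sketch of that change-detection idea, with hashlib standing in for grid-control's `md5_hex` and a plain dict for the persistent config state (both assumptions):

import hashlib

def md5_hex(value):
    return hashlib.md5(value.encode()).hexdigest()

def dataset_changed(state, block):
    dataset_hash_new = md5_hex(repr(block))
    dataset_hash_old = state.setdefault('dataset hash', dataset_hash_new)
    if dataset_hash_new != dataset_hash_old:
        state['dataset hash'] = dataset_hash_new
        return True  # caller should flag 'dataset' and 'parameters' for resync
    return False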
Example #13
    def __init__(self, config, datasource_name, dataset_expr,
                 dataset_nick=None, dataset_proc=None):
        DataProvider.__init__(self, config, datasource_name, dataset_expr,
                              dataset_nick, dataset_proc)

        (self._path, self._events, selist) = split_opt(dataset_expr, '|@')
        self._selist = parse_list(selist, ',') or None
        if not (self._path and self._events):
            raise ConfigError(
                'Invalid dataset expression!\nCorrect: /local/path/to/file|events[@SE1,SE2]'
            )
Example #14
	def __init__(self, config, datasetExpr, datasetNick = None, datasetID = 0):
		DataProvider.__init__(self, config, datasetExpr, datasetNick, datasetID)
		self._CommonPrefix = max(self.enumValues) + 1
		self._CommonMetadata = max(self.enumValues) + 2

		self._handleEntry = {
			'events': (DataProvider.NEntries, int, 'block entry counter'),
			'id': (DataProvider.DatasetID, int, 'dataset ID'),
			'metadata': (DataProvider.Metadata, parseJSON, 'metadata description'),
			'metadata common': (self._CommonMetadata, parseJSON, 'common metadata'),
			'nickname': (DataProvider.Nickname, str, 'dataset nickname'),
			'prefix': (self._CommonPrefix, str, 'common prefix'),
			'se list': (DataProvider.Locations, lambda value: parseList(value, ','), 'block location'),
		}

		(path, self._forcePrefix, self._filter) = utils.optSplit(datasetExpr, '@%')
		self._filename = config.resolvePath(path, True, 'Error resolving dataset file: %s' % path)
Example #15
	def __init__(self, config, datasource_name, dataset_expr, dataset_nick=None, dataset_proc=None):
		DataProvider.__init__(self, config, datasource_name, dataset_expr, dataset_nick, dataset_proc)
		self._common_prefix = max(DataProvider.enum_value_list) + 1
		self._common_metadata = max(DataProvider.enum_value_list) + 2

		self._entry_handler_info = {
			'events': (DataProvider.NEntries, int, 'block entry counter'),
			'id': (None, None, 'dataset ID'),  # legacy key - skip
			'metadata': (DataProvider.Metadata, parse_json, 'metadata description'),
			'metadata common': (self._common_metadata, parse_json, 'common metadata'),
			'nickname': (DataProvider.Nickname, str, 'dataset nickname'),
			'prefix': (self._common_prefix, str, 'common prefix'),
			'se list': (DataProvider.Locations, lambda value: parse_list(value, ','), 'block location'),
		}

		(path, self._forced_prefix, self._filter) = split_opt(dataset_expr, '@%')
		self._filename = config.resolve_path(path, True, 'Error resolving dataset file: %s' % path)
Example #16
	def __init__(self, config, datasetExpr, datasetNick = None):
		DataProvider.__init__(self, config, datasetExpr, datasetNick)
		self._CommonPrefix = max(self.enumValues) + 1
		self._CommonMetadata = max(self.enumValues) + 2

		self._handleEntry = {
			'events': (DataProvider.NEntries, int, 'block entry counter'),
			'id': (None, None, 'dataset ID'), # legacy key - skip
			'metadata': (DataProvider.Metadata, parseJSON, 'metadata description'),
			'metadata common': (self._CommonMetadata, parseJSON, 'common metadata'),
			'nickname': (DataProvider.Nickname, str, 'dataset nickname'),
			'prefix': (self._CommonPrefix, str, 'common prefix'),
			'se list': (DataProvider.Locations, lambda value: parseList(value, ','), 'block location'),
		}

		(path, self._forcePrefix, self._filter) = utils.optSplit(datasetExpr, '@%')
		self._filename = config.resolvePath(path, True, 'Error resolving dataset file: %s' % path)
Example #17
    def __init__(self, config, datasetExpr, datasetNick=None, datasetID=0):
        DataProvider.__init__(self, config, datasetExpr, datasetNick,
                              datasetID)

        config = config.changeView(viewClass='SimpleConfigView',
                                   setSections=['datasource %s' % datasetExpr])
        self._block = self._readBlockFromConfig(config, datasetExpr,
                                                datasetNick, datasetID)

        dataset_hash_new = md5_hex(repr(self._block))
        dataset_hash_old = config.get('dataset hash',
                                      dataset_hash_new,
                                      persistent=True)
        self._request_resync = dataset_hash_new != dataset_hash_old
        if self._request_resync:
            self._log.critical('Dataset %r changed', datasetExpr)
            config.setState(True, 'resync', detail='dataset')
            config.setState(True, 'resync', detail='parameters')
            config.set('dataset hash', dataset_hash_new)
Example #18
    def __init__(self, config, datasetExpr, datasetNick=None):
        DataProvider.__init__(self, config, datasetExpr, datasetNick)

        ds_config = config.changeView(
            viewClass='SimpleConfigView',
            setSections=['datasource %s' % datasetExpr])
        self._block = self._readBlockFromConfig(ds_config, datasetExpr,
                                                datasetNick)

        def onChange(config, old_obj, cur_obj, cur_entry, obj2str):
            self._log.critical('Dataset %r changed', datasetExpr)
            return triggerResync(['datasets',
                                  'parameters'])(config, old_obj, cur_obj,
                                                 cur_entry, obj2str)

        ds_config.get('dataset hash',
                      self.getHash(),
                      persistent=True,
                      onChange=onChange)
Example #19
    def __init__(self, config, datasource_name, dataset_expr,
                 dataset_nick=None, dataset_proc=None):
        DataProvider.__init__(self, config, datasource_name, dataset_expr,
                              dataset_nick, dataset_proc)

        ds_config = config.change_view(
            view_class='SimpleConfigView',
            set_sections=['datasource %s' % dataset_expr])
        self._block = self._read_block(ds_config, dataset_expr, dataset_nick)

        def _on_change(config, old_obj, cur_obj, cur_entry, obj2str):
            self._log.critical('Dataset %r changed', dataset_expr)
            return TriggerResync(['datasets',
                                  'parameters'])(config, old_obj, cur_obj,
                                                 cur_entry, obj2str)

        ds_config.get('dataset hash',
                      self._get_dataset_hash(),
                      persistent=True,
                      on_change=_on_change)
Example #20
    def __init__(self, config, datasetExpr, datasetNick, datasetID=0):
        DataProvider.__init__(self, config, datasetExpr, datasetNick, datasetID)

        (path, self._forcePrefix, self._filter) = utils.optSplit(datasetExpr, "@%")
        self._filename = config.resolvePath(path, True, "Error resolving dataset file: %s" % path)
Example #21
    def __init__(self, config, datasetExpr, datasetNick, providerList):
        DataProvider.__init__(self, config, datasetExpr, datasetNick)
        self._stats = DataProcessor.createInstance('SimpleStatsDataProcessor',
            config, None, self._log, 'Summary: Running over ')
        self._providerList = providerList
Example #22
	def __init__(self, config, datasetExpr, datasetNick, sList):
		DataProvider.__init__(self, config, datasetExpr, datasetNick)
		(self._ds_select, self._ds_name, self._ds_keys_user, self._ds_keys_guard) = self._setup(config, 'dataset')
		(self._b_select, self._b_name, self._b_keys_user, self._b_keys_guard) = self._setup(config, 'block')
		scanList = config.getList('scanner', sList) + ['NullScanner']
		self._scanner = lmap(lambda cls: InfoScanner.createInstance(cls, config), scanList)
Example #23
	def __init__(self, config, datasetExpr, datasetNick, datasetID, providerList):
		DataProvider.__init__(self, config, datasetExpr, datasetNick, datasetID)
		self._providerList = providerList
		for provider in self._providerList:
			provider.setPassthrough()
Example #24
	def __init__(self, config, datasetExpr, defaultProvider, datasetID = None):
		# ..., None, None) = Don't override NickName and ID
		DataProvider.__init__(self, config, None, None, None)
		def mkProvider(id_entry):
			(providerID, entry) = id_entry  # unpack the (index, line) pair from enumerate
			return DataProvider.create(config, entry, defaultProvider, providerID)
		self.subprovider = list(map(mkProvider,
			enumerate(filter(lambda x: x != '', datasetExpr.splitlines()))))
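Here `datasetExpr` may span several lines, each naming one dataset, and every non-empty line becomes its own sub-provider. A sketch of the enumeration step with a hypothetical sample expression:

dataset_expr = '/dataset/one\n\n/dataset/two'  # hypothetical sample input
entries = [line for line in dataset_expr.splitlines() if line != '']
for provider_id, entry in enumerate(entries):
    print(provider_id, entry)  # each (id, entry) pair feeds DataProvider.create
# -> 0 /dataset/one
#    1 /dataset/two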
Example #25
	def __init__(self, config, datasetExpr, datasetNick, providerList):
		DataProvider.__init__(self, config, datasetExpr, datasetNick)
		self._stats = DataProcessor.createInstance('SimpleStatsDataProcessor', config, None, self._log, 'Summary: Running over ')
		self._providerList = providerList