def _read_block(self, ds_config, dataset_expr, dataset_nick):
	# Metadata key names and the metadata values shared by all files in the block
	metadata_name_list = parse_json(ds_config.get('metadata', '[]', on_change=None))
	common_metadata = parse_json(ds_config.get('metadata common', '[]', on_change=None))
	if len(common_metadata) > len(metadata_name_list):
		raise DatasetError('Unable to set %d common metadata items ' % len(common_metadata) +
			'with %d metadata keys' % len(metadata_name_list))
	common_prefix = ds_config.get('prefix', '', on_change=None)
	fn_list = []
	has_events = False
	has_se_list = False
	# Every option that is not a reserved keyword describes a single file entry
	for url in ds_config.get_option_list():
		if url == 'se list':
			has_se_list = True
		elif url == 'events':
			has_events = True
		elif url not in ['dataset hash', 'metadata', 'metadata common', 'nickname', 'prefix']:
			fi = self._read_fi(ds_config, url, metadata_name_list, common_metadata, common_prefix)
			fn_list.append(fi)
	if not fn_list:
		raise DatasetError('There are no dataset files specified for dataset %r' % dataset_expr)
	result = {
		DataProvider.Nickname: ds_config.get('nickname', dataset_nick or '', on_change=None),
		DataProvider.FileList: sorted(fn_list, key=lambda fi: fi[DataProvider.URL]),
	}
	result.update(DataProvider.parse_block_id(dataset_expr))
	if metadata_name_list:
		result[DataProvider.Metadata] = metadata_name_list
	if has_events:
		result[DataProvider.NEntries] = ds_config.get_int('events', -1, on_change=None)
	if has_se_list:
		result[DataProvider.Locations] = parse_list(ds_config.get('se list', '', on_change=None), ',')
	return result
def _create_block(self, block_name):
	result = {
		DataProvider.Locations: None,
		DataProvider.FileList: [],
		self._common_prefix: None,
		self._common_metadata: [],
	}
	# Strip the surrounding brackets of the section header to recover the block id
	result.update(DataProvider.parse_block_id(block_name.lstrip('[').rstrip(']')))
	return result
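# Illustrative sketch of a config block that _read_block can consume. The reserved
# option names ('nickname', 'events', 'se list', 'prefix', 'metadata',
# 'metadata common', 'dataset hash') follow directly from the code above; the
# dataset path, file names and values are made up, and the exact per-file value
# format is determined by _read_fi (not shown here), so treat the file lines
# below as an assumption rather than the definitive format:
#
#   [/PrivateDataset/example#block0]
#   nickname = example_nick
#   events = 300
#   se list = se1.example.org, se2.example.org
#   prefix = /store/user/example
#   metadata = ["KEY1"]
#   file_a.root = 100
#   file_b.root = 200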