def discover_blocks(options):
    # Resolve the work directory and create the DBS dump directory inside it
    if os.path.isdir(options.args[0]):
        work_dn = os.path.abspath(os.path.normpath(options.args[0]))
    else:
        work_dn = gc_create_config(config_file=options.args[0]).get_work_path()
    if not options.opts.tempdir:
        options.opts.tempdir = os.path.join(work_dn, 'dbs')
    if not os.path.exists(options.opts.tempdir):
        os.mkdir(options.opts.tempdir)

    # Create a provider with the dataset information - either from a file
    # listing or directly from DBS
    config = gc_create_config(config_dict={'dataset': options.config_dict}, load_old_config=False)
    if options.opts.input_file:
        provider = DataProvider.create_instance('ListProvider',
            config, 'dataset', options.opts.input_file)
    else:
        provider = DataProvider.create_instance('DBSInfoProvider',
            config, 'dataset', options.args[0])

    # Query the blocks, dump them to disk and optionally stop after discovery
    blocks = provider.get_block_list_cached(show_stats=False)
    DataProvider.save_to_file(os.path.join(options.opts.tempdir, 'dbs.dat'), blocks)
    if options.opts.discovery:
        sys.exit(os.EX_OK)
    return blocks
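
The directory handling above follows a common pattern: accept either an existing work directory or a config file that knows its work path, then make sure a dump subdirectory exists inside it. A minimal standalone sketch of that pattern (the helper name ensure_dump_dir and the fallback_dn parameter are made up for illustration, not part of grid-control):

import os

def ensure_dump_dir(path_or_dir, sub='dbs', fallback_dn='.'):
    # Hypothetical helper: treat an existing directory as the work
    # directory, otherwise fall back to a default location.
    if os.path.isdir(path_or_dir):
        work_dn = os.path.abspath(os.path.normpath(path_or_dir))
    else:
        work_dn = os.path.abspath(fallback_dn)
    dump_dn = os.path.join(work_dn, sub)
    if not os.path.exists(dump_dn):
        os.makedirs(dump_dn)  # also creates missing parent directories
    return dump_dn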
def setup_config(opts, args):
    # Set up the config based on a config file or command line settings
    config_fn = None
    if os.path.exists(args[0]):
        config_fn = args[0]
    config = gc_create_config(config_file=config_fn).change_view(
        set_sections=['global'])
    if os.path.exists(config.get_work_path('datamap.tar')):
        opts.dataset = config.get_work_path('datamap.tar')
    # '?=' only sets the value if the option is not already defined
    config.change_view(set_sections=['jobs']).set('nseeds', '1', '?=')
    param_config = config.change_view(set_sections=['parameters'])
    if opts.parameter:
        logging.info('Provided options:')
        for param in opts.parameter:
            key, value = param.split('=', 1)  # split only on the first '='
            param_config.set(key.strip(),
                             value.strip().replace('\\n', '\n'), '=')
            logging.info('\t%s: %s', key.strip(), value.strip())
        logging.info('')

    if config_fn is None:
        param_config.set('parameters',
                         str.join(' ', args).replace('\\n', '\n'))
        if opts.dataset:
            param_config.set('default lookup', 'DATASETNICK')
        if opts.verbose > 2:
            buffer = StringBuffer()
            config.change_view(set_sections=None).write(buffer)
            logging.getLogger('script').info(buffer.getvalue().rstrip())
    return config
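
The opts.parameter loop above implements a small key=value parser: split on the first '=' only, strip surrounding whitespace, and turn literal '\n' escape sequences into real newlines. A self-contained sketch of the same parsing rule (parse_overrides is a hypothetical name, not a grid-control function):

def parse_overrides(pairs):
    # Parse ['key=value', ...] into a dict; splitting only on the first
    # '=' lets values themselves contain '=' characters.
    result = {}
    for pair in pairs:
        key, value = pair.split('=', 1)
        result[key.strip()] = value.strip().replace('\\n', '\n')
    return result

print(parse_overrides(['events = 1000', 'cut=pt>30']))
# {'events': '1000', 'cut': 'pt>30'}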
def get_job_db(options):
    if len(options.args) != 1:
        options.parser.exit_with_usage(options.parser.usage('jobs'))
    config = gc_create_config(config_file=options.args[0])
    # Initialise the task module named in the config
    task_cls_name = config.get(['task', 'module'])
    task = Plugin.create_instance(task_cls_name, config, task_cls_name)
    job_db = Plugin.create_instance('TextFileJobDB', config)
    # Build a job selector from the command line expression and list matches
    selected = JobSelector.create(options.opts.job_selector, task=task)
    logging.info('Matching jobs: ' + str.join(' ', imap(str, job_db.iter_jobs(selected))))
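
JobSelector.create turns the selector expression into a predicate that iter_jobs applies to each job. A minimal stand-in showing the same filter-by-predicate idea (plain Python, not the grid-control API):

def iter_matching(job_ids, selector):
    # Yield only the job ids accepted by the selector predicate
    for jid in job_ids:
        if selector(jid):
            yield jid

even_jobs = list(iter_matching(range(10), lambda jid: jid % 2 == 0))
print(even_jobs)  # [0, 2, 4, 6, 8]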
def _main():
    parser = ScriptOptions(usage='%s [OPTIONS] <DBS dataset path>')
    parser.add_text(None,
                    '',
                    'producer',
                    default='SimpleNickNameProducer',
                    help='Name of the nickname producer')
    parser.add_bool(None,
                    'L',
                    'nick-list',
                    default=False,
                    help='List available nickname producer classes')
    options = parser.script_parse()

    if options.opts.nick_list:
        display_plugin_list_for('NickNameProducer',
                                title='Available nickname producer classes')
    if not options.args:
        parser.exit_with_usage()

    dataset_path = options.args[0]
    if ('*' in dataset_path) or os.path.exists(dataset_path):
        dataset_provider = 'DBS3Provider'
        if os.path.exists(dataset_path):
            dataset_provider = 'ListProvider'
        provider = Plugin.create_instance(dataset_provider, gc_create_config(),
                                          'dataset', dataset_path)
        dataset_path_list = provider.get_dataset_name_list()
    else:
        dataset_path_list = [dataset_path]

    nn_prod = Plugin.get_class('NickNameProducer').create_instance(
        options.opts.producer, gc_create_config(), 'dataset')
    # Show a two column table with each dataset path and its generated nickname
    ConsoleTable.create([(1, 'Dataset'), (0, 'Nickname')],
        lmap(lambda ds: {0: nn_prod.get_name('', ds, None), 1: ds}, dataset_path_list), 'll')
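
The branch above routes wildcard patterns and existing files through a dataset provider, while a plain dataset path is used directly; the table then pairs each dataset with the nickname its producer generates. A toy version of that mapping step (the nickname rule here is invented for illustration, it is not SimpleNickNameProducer's logic):

def simple_nickname(dataset_path):
    # Toy nickname rule: join the non-empty path components with '_'
    return '_'.join(part for part in dataset_path.split('/') if part)

for ds in ['/MinBias/Run2012A/RECO']:
    print('%-30s %s' % (ds, simple_nickname(ds)))
# /MinBias/Run2012A/RECO        MinBias_Run2012A_RECO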
def get_dataset_config(opts, args):
    dataset = str.join('\n', args)
    # Auto-detect the provider: existing files are read by ListProvider,
    # everything else is treated as a DBS dataset path
    if (not opts.provider) and not os.path.exists(clean_path(dataset.split()[0])):
        opts.provider = 'DBS3Provider'
    elif not opts.provider:
        opts.provider = 'ListProvider'
    config_dict = {
        'dbs blacklist T1 *': 'False',
        'remove empty blocks *': 'False',
        'remove empty files *': 'False',
        'location format *': opts.location,
        'nickname check collision *': 'False',
        'dataset location filter': opts.location_filter,
        'dataset *': dataset,
        'dataset provider *': opts.provider,
    }
    if opts.metadata or opts.list_metadata or opts.list_metadata_common:
        config_dict['lumi filter *'] = '-'
        config_dict['keep lumi metadata *'] = 'True'
    return gc_create_config(config_file=opts.settings, config_dict={'dataset': config_dict})
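
The provider auto-detection boils down to one rule: keep an explicit choice, otherwise pick based on whether the first argument is an existing file. A compact standalone version (pick_provider is a hypothetical name):

import os

def pick_provider(first_arg, provider=None):
    # Hypothetical helper: an explicit provider wins; existing files are
    # read as dataset listings, anything else is a DBS dataset path.
    if provider:
        return provider
    if os.path.exists(first_arg):
        return 'ListProvider'
    return 'DBS3Provider'

print(pick_provider('/Some/DBS/Path'))  # DBS3Provider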
def backend_list(finder_name):
    finder = Plugin.get_class('BackendDiscovery').create_instance(finder_name, gc_create_config())
    item_dict_list = []
    item_key_set = set()
    for item_dict in finder.discover():
        # Convert enum keys into readable strings and collect all column names
        nice_item_dict = {}
        for (key, value) in item_dict.items():
            if isinstance(key, int):
                key = WMS.enum2str(key)
            nice_item_dict[key] = value
            item_key_set.add(key)
        item_dict_list.append(nice_item_dict)
    # Show the 'name' column first, followed by the remaining keys in order
    item_key_set.remove('name')
    item_key_list = sorted(item_key_set)
    ConsoleTable.create([('name', 'Name')] + lzip(item_key_list, item_key_list), item_dict_list)
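
The inner loop normalizes heterogeneous dictionaries so they can share one table layout: non-string keys become strings, and the union of all keys defines the column set. A generic sketch of that step (normalize_rows is a hypothetical name; str stands in for WMS.enum2str):

def normalize_rows(rows, enum2str=str):
    # Map non-string keys to strings and gather the union of all keys
    # so every row can be rendered against one shared column set.
    normalized, columns = [], set()
    for row in rows:
        nice = {(enum2str(key) if not isinstance(key, str) else key): value
                for key, value in row.items()}
        normalized.append(nice)
        columns.update(nice)
    return normalized, sorted(columns)

rows, cols = normalize_rows([{'name': 'site1', 1: 4}, {'name': 'site2', 2: 8}])
print(cols)  # ['1', '2', 'name']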
def _lookup_pfn(options):
    # Resolve the '<hypernews_name>' placeholder via the user's grid certificate
    if '<hypernews_name>' in options.opts.lfn:
        token = Plugin.get_class('AccessToken').create_instance('VomsProxy', gc_create_config(), 'token')
        site_db = SiteDB()
        hn_name = site_db.dn_to_username(dn=token.get_fq_user_name())
        if not hn_name:
            raise Exception('Unable to map grid certificate to hypernews name!')
        options.opts.lfn = options.opts.lfn.replace('<hypernews_name>', hn_name)

    tmp = _lfn2pfn(node=options.opts.se, prot=options.opts.se_prot, lfn=options.opts.lfn)
    for entry in tmp:
        entry_str = entry['pfn']
        if len(tmp) > 1:  # prefix the node name if multiple mappings are returned
            entry_str = '%s %s' % (entry['node'], entry_str)
        logging.getLogger('script').info(entry_str)
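
The placeholder handling is plain string substitution: replace each known '<name>' token once its value has been resolved. A minimal generic version (expand_placeholders is a hypothetical name):

def expand_placeholders(template, values):
    # Replace each known '<placeholder>' token with its resolved value
    for name, value in values.items():
        template = template.replace('<%s>' % name, value)
    return template

print(expand_placeholders('/store/user/<hypernews_name>/file.root',
                          {'hypernews_name': 'jdoe'}))
# /store/user/jdoe/file.root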
def discover_dataset(provider_name, config_dict):
    buffer = StringBuffer()
    config = gc_create_config(config_dict={'dataset': config_dict})
    config = config.change_view(set_sections=['dataset'])
    provider = Plugin.get_class('DataProvider').create_instance(provider_name,
        config, 'dataset', config_dict['dataset'], None)
    # Only dump the effective configuration if requested
    if config_dict['dump config'] == 'True':
        config.write(buffer, print_default=True, print_minimal=True)
        return logging.getLogger('script').info(buffer.getvalue().rstrip())
    strip_metadata = config_dict['strip'] == 'True'
    block_iter = provider.get_block_list_cached(show_stats=False)
    # Write the discovered blocks to a file or stream them to the log
    if config_dict['output']:
        return provider.save_to_file(config_dict['output'], block_iter, strip_metadata)
    for _ in provider.save_to_stream(buffer, block_iter, strip_metadata):
        pass
    logging.getLogger('script').info(buffer.getvalue().rstrip())
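
The output handling above picks between a configured file target and an in-memory buffer that is flushed to the log. A generic version of that decision (write_report is a hypothetical name, unrelated to grid-control):

def write_report(lines, output_path=None):
    text = '\n'.join(lines)
    if output_path:  # file target configured -> persist to disk
        with open(output_path, 'w') as fp:
            fp.write(text)
        return
    print(text)  # otherwise emit to the console/log

write_report(['block1', 'block2'])  # prints both lines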