def get(self, prepid=None):
    """
    Endpoint for getting information on which relval fields are editable
    """
    if not prepid:
        # No prepid given - describe a brand new, empty RelVal
        new_relval = RelVal()
        editing_info = relval_controller.get_editing_info(new_relval)
        payload = new_relval.get_json()
    else:
        prepid_list = clean_split(prepid, ',')
        if len(prepid_list) == 1:
            # Single prepid - respond with a single object
            relval_object = relval_controller.get(prepid_list[0])
            editing_info = relval_controller.get_editing_info(relval_object)
            payload = relval_object.get_json()
        else:
            # Multiple prepids - respond with parallel lists
            relval_objects = [relval_controller.get(one_id) for one_id in prepid_list]
            editing_info = [relval_controller.get_editing_info(obj)
                            for obj in relval_objects]
            payload = [obj.get_json() for obj in relval_objects]

    return self.output_text({'response': {'object': payload,
                                          'editing_info': editing_info},
                             'success': True,
                             'message': ''})
def get(self, prepid=None):
    """
    Get an object and its editing info, or a list of objects and their
    editing infos when multiple comma separated prepids are given
    """
    if not prepid:
        # No prepid given - describe a brand new, empty Request
        new_request = Request()
        editing_info = request_controller.get_editing_info(new_request)
        payload = new_request.get_json()
    else:
        prepid_list = clean_split(prepid, ',')
        if len(prepid_list) == 1:
            # Single prepid - respond with a single object
            request_object = request_controller.get(prepid_list[0])
            editing_info = request_controller.get_editing_info(request_object)
            payload = request_object.get_json()
        else:
            # Multiple prepids - respond with parallel lists
            request_objects = [request_controller.get(one_id) for one_id in prepid_list]
            editing_info = [request_controller.get_editing_info(obj)
                            for obj in request_objects]
            payload = [obj.get_json() for obj in request_objects]

    return self.output_text({'response': {'object': payload,
                                          'editing_info': editing_info},
                             'success': True,
                             'message': ''})
def __upload_configs(self, request, ssh_executor, remote_directory):
    """
    SSH to a remote machine and upload cmsDriver config files to ReqMgr2
    """
    prepid = request.get_prepid()
    self.logger.debug('Will upload configs for %s', prepid)
    upload_commands = [f'cd {remote_directory}',
                       'chmod +x config_upload.sh',
                       'export X509_USER_PROXY=$(pwd)/proxy.txt',
                       './config_upload.sh']
    stdout, stderr, exit_code = ssh_executor.execute_command(upload_commands)
    if exit_code != 0:
        raise Exception(f'Error uploading configs for {prepid}.\n{stderr}')

    # Keep only output lines that mention DocID, then turn each one into a
    # tuple of its space separated fields (dropping the first field)
    doc_id_lines = [line for line in clean_split(stdout, '\n') if 'DocID' in line]
    return [tuple(clean_split(line.strip(), ' ')[1:]) for line in doc_id_lines]
def make_relval_step(self, step_dict):
    """
    Remove, split or move arguments in step dictionary
    returned from run_the_matrix_pdmv.py
    """
    # Input file section - the event count is not kept
    input_dict = step_dict.get('input', {})
    input_dict.pop('events', None)
    # cmsDriver arguments section
    arguments = step_dict.get('arguments', {})
    # Drop arguments that are handled elsewhere
    for unwanted in ('--filein', '--fileout', '--lumiToProcess'):
        arguments.pop(unwanted, None)

    # These arguments are comma separated strings - turn them into lists
    for listified in ('--step', '--eventcontent', '--datatier'):
        arguments[listified] = clean_split(arguments.get(listified, ''))

    # Anything that is not in the driver schema is collected into "extra"
    driver_schema = RelValStep.schema()['driver']
    known_keys = {f'--{key}' for key in driver_schema.keys()}
    extra_parts = []
    for key, value in arguments.items():
        if key == 'fragment_name' or key in known_keys:
            continue

        if isinstance(value, bool):
            # Boolean flags appear without a value
            if value:
                extra_parts.append(f'{key}')
        elif isinstance(value, list):
            if value:
                extra_parts.append(f'{key} {",".join(value)}')
        elif value:
            extra_parts.append(f'{key} {value}')

    arguments['extra'] = ' '.join(extra_parts)
    # Strip the leading dashes from all argument names
    arguments = {key.lstrip('-'): value for key, value in arguments.items()}
    # Build the step itself
    step_name = step_dict['name']
    # Delete INPUT from step name
    if step_name.endswith('INPUT'):
        step_name = step_name[:-len('INPUT')]

    new_step = {'name': step_name,
                'lumis_per_job': step_dict.get('lumis_per_job', ''),
                'events_per_lumi': step_dict.get('events_per_lumi', ''),
                'driver': arguments,
                'input': input_dict}
    self.logger.debug('Step dict: %s', json.dumps(new_step, indent=2))
    return new_step
def upload_configs(self, relval, ssh_executor, workspace_dir):
    """
    SSH to a remote machine and upload cmsDriver config files to ReqMgr2
    """
    prepid = relval.get_prepid()
    upload_commands = [f'cd {workspace_dir}',
                       'export WORKSPACE_DIR=$(pwd)',
                       f'cd {prepid}',
                       'export RELVAL_DIR=$(pwd)',
                       'chmod +x config_upload.sh',
                       'export X509_USER_PROXY=$(pwd)/proxy.txt',
                       './config_upload.sh']
    stdout, stderr, exit_code = ssh_executor.execute_command(upload_commands)
    self.logger.debug('Exit code %s for %s config upload', exit_code, prepid)
    if exit_code != 0:
        raise Exception(f'Error uploading configs for {prepid}.\n{stderr}')

    # Keep only output lines that mention DocID, then turn each one into a
    # tuple of its space separated fields (dropping the first field)
    doc_id_lines = [line for line in clean_split(stdout, '\n') if 'DocID' in line]
    return [tuple(clean_split(line.strip(), ' ')[1:]) for line in doc_id_lines]
def get(self):
    """
    Get datasets matching the "q" query argument, excluding the ones
    given in the comma separated "exclude" argument
    """
    query = request.args.get('q')
    if not query:
        raise Exception('No input was supplied')

    exclude_list = clean_split(request.args.get('exclude', ''))
    datasets = ticket_controller.get_datasets(query, exclude_list)
    return self.output_text({'response': datasets,
                             'success': True,
                             'message': ''})
def resolve_auto_conditions(self, conditions_tree):
    """
    Iterate through conditions tree and resolve global tags
    Conditions tree example:
    {
        "CMSSW_11_2_0_pre9": {
            "slc7_a_b_c": {
                "auto:phase1_2021_realistic": None
            }
        }
    }

    The tree is updated in place: each conditions value is replaced with
    the resolved global tag name. Raises Exception if the remote directory
    cannot be created or the remote resolution script fails.
    """
    self.logger.debug('Resolve auto conditions of:\n%s',
                      json.dumps(conditions_tree, indent=2))
    credentials_file = Config.get('credentials_file')
    remote_directory = Config.get('remote_path').rstrip('/')
    command = [f'cd {remote_directory}']
    for cmssw_version, scram_tree in conditions_tree.items():
        for scram_arch, conditions in scram_tree.items():
            # Setup CMSSW environment
            # No need to explicitly reuse CMSSW as this happens in
            # relval_submission directory
            os_name, _, gcc_version = clean_split(scram_arch, '_')
            # Force the amd64 variant of the scram arch for the setup step
            amd_scram_arch = f'{os_name}_amd64_{gcc_version}'
            command.extend(
                cmssw_setup(cmssw_version, scram_arch=amd_scram_arch).split('\n'))
            conditions_str = ','.join(list(conditions.keys()))
            command += [('python3 resolve_auto_global_tag.py '
                         f'"{cmssw_version}" "{scram_arch}" "{conditions_str}" || exit $?')]

    self.logger.debug('Resolve auto conditions command:\n%s', '\n'.join(command))
    with SSHExecutor('lxplus.cern.ch', credentials_file) as ssh_executor:
        # Upload python script to resolve auto globaltag by upload script
        stdout, stderr, exit_code = ssh_executor.execute_command(
            f'mkdir -p {remote_directory}')
        if exit_code != 0:
            self.logger.error('Error creating %s:\nstdout:%s\nstderr:%s',
                              remote_directory, stdout, stderr)
            # Fixed typo in user-facing message ("creting" -> "creating")
            raise Exception(f'Error creating remote directory: {stderr}')

        ssh_executor.upload_file(
            './core/utils/resolve_auto_global_tag.py',
            f'{remote_directory}/resolve_auto_global_tag.py')
        stdout, stderr, exit_code = ssh_executor.execute_command(command)
        if exit_code != 0:
            self.logger.error(
                'Error resolving auto global tags:\nstdout:%s\nstderr:%s',
                stdout, stderr)
            raise Exception(f'Error resolving auto globaltags: {stderr}')

        # The remote script prints one "GlobalTag: <cmssw> <arch> <auto> <resolved>"
        # line per resolved tag - parse those back into the tree
        tags = [x for x in clean_split(stdout, '\n') if x.startswith('GlobalTag:')]
        for resolved_tag in tags:
            split_resolved_tag = clean_split(resolved_tag, ' ')
            cmssw_version = split_resolved_tag[1]
            scram_arch = split_resolved_tag[2]
            conditions = split_resolved_tag[3]
            resolved = split_resolved_tag[4]
            self.logger.debug('Resolved %s to %s in %s (%s)',
                              conditions, resolved, cmssw_version, scram_arch)
            conditions_tree[cmssw_version][scram_arch][conditions] = resolved
def get_config_upload_file(self, relval, for_submission=False):
    """
    Get bash script that would upload config files to ReqMgr2

    relval: object whose steps provide the configs to be uploaded.
    for_submission: unused in this body - presumably kept for interface
    parity with sibling *_file getters; TODO confirm against callers.
    Returns the full script as a single string.
    """
    self.logger.debug('Getting config upload script for %s', relval.get_prepid())
    # Strip the scheme - the uploader script expects a bare host name
    database_url = Config.get('cmsweb_url').replace('https://', '').replace(
        'http://', '')
    command = '#!/bin/bash\n\n'
    command += 'export SINGULARITY_CACHEDIR="/tmp/$(whoami)/singularity"\n'
    command += '\n'
    # Check if all expected config files are present
    common_check_part = 'if [ ! -s "%s.py" ]; then\n'
    common_check_part += ' echo "File %s.py is missing" >&2\n'
    common_check_part += ' exit 1\n'
    common_check_part += 'fi\n'
    for step in relval.get('steps'):
        # Run config check
        config_name = step.get_config_file_name()
        if config_name:
            command += common_check_part % (config_name, config_name)

    # Use ConfigCacheLite and TweakMakerLite instead of WMCore
    command += '\n'
    command += config_cache_lite_setup()
    command += '\n\n'
    # Upload command will be identical for all configs
    common_upload_part = (
        '\npython3 config_uploader.py --file $(pwd)/%s.py --label %s '
        f'--group ppd --user $(echo $USER) --db {database_url} || exit $?')
    previous_step_cmssw = None
    previous_step_scram = None
    # Commands accumulated for the current CMSSW release + scram arch group;
    # flushed to "command" whenever the environment changes
    container_code = ''
    # 1-based step numbers in the current group, used in the script name
    container_steps = []
    default_os = 'slc7_'
    for index, step in enumerate(relval.get('steps')):
        # Run config uploader
        config_name = step.get_config_file_name()
        if not config_name:
            continue

        step_cmssw = step.get_release()
        real_scram_arch = step.get_scram_arch()
        os_name, _, gcc_version = clean_split(real_scram_arch, '_')
        # Force the amd64 variant of the scram arch for the upload
        scram_arch = f'{os_name}_amd64_{gcc_version}'
        if step_cmssw != previous_step_cmssw or scram_arch != previous_step_scram:
            if container_code:
                if not previous_step_scram.startswith(default_os):
                    # Non-default OS - wrap the accumulated commands so they
                    # run inside a singularity container for that arch
                    container_script_name = f'upload-steps-{"-".join(container_steps)}'
                    container_code = run_commands_in_singularity(
                        container_code, previous_step_scram, container_script_name)
                    container_code = '\n'.join(container_code)

                command += container_code.strip()
                command += '\n\n\n'
                container_code = ''
                container_steps = []

            if real_scram_arch != scram_arch:
                container_code += f'# Real scram arch is {real_scram_arch}\n'

            container_code += cmssw_setup(step_cmssw, scram_arch=scram_arch)
            container_code += '\n'

        container_code += common_upload_part % (config_name, config_name)
        container_code += '\n'
        container_steps.append(str(index + 1))
        previous_step_cmssw = step_cmssw
        previous_step_scram = scram_arch

    # Flush the final group; NOTE(review): assumes at least one step had a
    # config (otherwise scram_arch would be unbound here) - confirm callers
    if not scram_arch.startswith(default_os):
        container_script_name = f'upload-steps-{"-".join(container_steps)}'
        container_code = run_commands_in_singularity(
            container_code, scram_arch, container_script_name)
        container_code = '\n'.join(container_code)

    command += container_code
    return command.strip()