def curate_data(self, run_settings, output_location, experiment_id):
    ''' Creates experiment in MyTardis '''
    # Load the MyTardis credentials for the current user.
    bdp_username = getval(run_settings,
                          '%s/bdp_userprofile/username' % SCHEMA_PREFIX)
    mytardis_url = getval(run_settings,
                          '%s/input/mytardis/mytardis_platform' % SCHEMA_PREFIX)
    mytardis_settings = manage.get_platform_settings(mytardis_url, bdp_username)

    def _get_experiment_name(path):
        ''' Return the name for MyTardis experiment
            e.g., if path='x/y/z', returns 'y/z'
        '''
        return str(os.sep.join(path.split(os.sep)[-2:]))

    # experiment_id == 0 creates a new experiment; a non-zero id updates
    # the existing experiment.
    paramsets = [
        # a new blank parameter set conforming to schema 'remotemake'
        mytardis.create_paramset("remotemake", []),
        # a graph parameter set describing the experiment-level graph
        mytardis.create_graph_paramset(
            "expgraph",                      # name of schema
            name="randexp1",                 # unique graph name
            # information about the graph
            graph_info={"axes": ["x", "y"], "legends": ["Random points"]},
            # values to be used in parent graphs if appropriate
            value_dict={},
            # values from datasets to produce points in the graph
            value_keys=[["randdset/x", "randdset/y"]]),
    ]
    experiment_id = mytardis.create_experiment(
        settings=mytardis_settings,
        exp_id=experiment_id,
        expname=_get_experiment_name(output_location),
        experiment_paramset=paramsets)
    return experiment_id
def process(self, run_settings):
    """Load stage state (iteration id and node lists) from run_settings
    and resolve the computation platform type.

    Missing or malformed settings fall back to safe defaults (0 / []).
    """
    try:
        self.id = int(getval(run_settings,
            '%s/system/id' % django_settings.SCHEMA_PREFIX))
    except (SettingNotFoundException, ValueError):
        self.id = 0
    try:
        self.created_nodes = ast.literal_eval(getval(
            run_settings,
            '%s/stages/create/created_nodes' % django_settings.SCHEMA_PREFIX))
    except (SettingNotFoundException, ValueError):
        self.created_nodes = []
    try:
        self.scheduled_nodes = ast.literal_eval(getval(
            run_settings,
            '%s/stages/schedule/scheduled_nodes' % django_settings.SCHEMA_PREFIX))
    except (SettingNotFoundException, ValueError):
        self.scheduled_nodes = []
    try:
        self.bootstrapped_nodes = ast.literal_eval(getval(
            run_settings,
            '%s/stages/bootstrap/bootstrapped_nodes' % django_settings.SCHEMA_PREFIX))
    except (SettingNotFoundException, ValueError):
        self.bootstrapped_nodes = []
    #messages.info(run_settings, "%d: destroy" % self.id)
    comp_pltf_settings = self.get_platform_settings(
        run_settings,
        '%s/platform/computation' % django_settings.SCHEMA_PREFIX)
    try:
        platform_type = comp_pltf_settings['platform_type']
    except KeyError as e:  # FIX: was legacy "except KeyError, e" syntax
        logger.error(e)
        messages.error(run_settings, e)
        return
def is_triggered(self, run_settings):
    """Check the preconditions for this stage: at least one created node
    is running, bootstrapping is done, and at least one node has been
    bootstrapped.  Also loads self.procs_2b_rescheduled (default []).
    """
    try:
        self.created_nodes = ast.literal_eval(getval(run_settings,
            '%s/stages/create/created_nodes' % RMIT_SCHEMA))
        running_created_nodes = [x for x in self.created_nodes
                                 if str(x[3]) == 'running']
        logger.debug('running_created_nodes=%s' % running_created_nodes)
        if len(running_created_nodes) == 0:
            return False
    except (SettingNotFoundException, ValueError) as e:
        logger.debug(e)
        return False
    try:
        bootstrap_done = int(getval(run_settings,
            '%s/stages/bootstrap/bootstrap_done' % RMIT_SCHEMA))
        if not bootstrap_done:
            return False
    except (SettingNotFoundException, ValueError) as e:
        logger.debug(e)
        return False
    try:
        self.bootstrapped_nodes = ast.literal_eval(getval(
            run_settings,
            '%s/stages/bootstrap/bootstrapped_nodes' % RMIT_SCHEMA))
        if len(self.bootstrapped_nodes) == 0:
            return False
    except (SettingNotFoundException, ValueError) as e:
        return False
    try:
        reschedule_str = getval(run_settings,
            '%s/stages/schedule/procs_2b_rescheduled' % RMIT_SCHEMA)
        self.procs_2b_rescheduled = ast.literal_eval(reschedule_str)
    except SettingNotFoundException as e:  # FIX: was legacy "except X, e"
        # FIXME: when is procs_2b_rescheduled set?
        logger.debug(e)
        self.procs_2b_rescheduled = []
def setup_output(self, run_settings):
    """Resolve the output storage platform for this run.

    If run_settings has no output storage entry yet, derive it from one
    of the known output_location settings and record it in run_settings.
    """
    self.output_platform_name = ''
    self.output_platform_offset = ''
    self.output_loc_offset = ''
    if not self.output_exists(run_settings):
        return
    logger.debug('special=%s' % run_settings)
    storage_key = '%s/platform/storage/output' % django_settings.SCHEMA_PREFIX
    try:
        run_settings[storage_key]
    except KeyError:
        logger.debug('bdp_url settings ...')
        # Probe the three historical locations of the output_location
        # setting, in order of preference.
        try:
            bdp_url = getval(run_settings, django_settings.SCHEMA_PREFIX
                             + '/input/system/output_location')
            logger.debug('bdp_url=%s' % bdp_url)
        except SettingNotFoundException:
            try:
                bdp_url = getval(run_settings, django_settings.SCHEMA_PREFIX
                                 + '/input/location/output_location')
                logger.debug('bdp_url=%s' % bdp_url)
            except SettingNotFoundException:
                bdp_url = getval(run_settings, django_settings.SCHEMA_PREFIX
                                 + '/input/location/output/output_location')
                logger.debug('bdp_url=%s' % bdp_url)
        self.output_platform_name, self.output_platform_offset = \
            self.break_bdp_url(bdp_url)
        run_settings[storage_key] = {}
        run_settings[storage_key]['platform_url'] = self.output_platform_name
        run_settings[storage_key]['offset'] = self.output_platform_offset
def set_execute_settings(self, run_settings, local_settings):
    """Populate local_settings with everything the execute stage needs,
    including hadoop paths when a hadoop computation platform is used."""
    self.set_domain_settings(run_settings, local_settings)
    prefix = django_settings.SCHEMA_PREFIX
    update(local_settings, run_settings,
           '%s/stages/setup/payload_destination' % prefix,
           '%s/stages/setup/filename_for_PIDs' % prefix,
           '%s/stages/setup/process_output_dirname' % prefix,
           '%s/stages/setup/smart_connector_input' % prefix,
           '%s/system/contextid' % prefix,
           '%s/system/random_numbers' % prefix,
           '%s/system/id' % prefix)
    try:
        local_settings['curate_data'] = getval(
            run_settings, '%s/input/mytardis/curate_data' % prefix)
    except SettingNotFoundException:
        local_settings['curate_data'] = 0
    local_settings['bdp_username'] = getval(
        run_settings, '%s/bdp_userprofile/username' % prefix)
    # Hadoop platforms additionally need home/root paths resolved.
    if '%s/input/system/compplatform/hadoop' % prefix in run_settings.keys():
        from chiminey.platform import get_platform_settings
        platform_url = run_settings[
            '%s/platform/computation' % prefix]['platform_url']
        pltf_settings = get_platform_settings(
            platform_url, local_settings['bdp_username'])
        local_settings['root_path'] = '/home/%s' % pltf_settings['username']
        local_settings['hadoop_home_path'] = pltf_settings['hadoop_home_path']
        logger.debug('root_path=%s' % local_settings['root_path'])
    else:
        logger.debug('root_path not found')
def post_mytardis_exp(run_settings, experiment_id, output_location,
                      experiment_paramset=None):
    """Create or update a MyTardis experiment for *output_location*.

    Returns the (possibly new) experiment id, or 0 when no MyTardis
    platform is configured.  Curation can be switched off via the
    curate_data input setting.
    """
    # TODO: move into mytardis package?
    if experiment_paramset is None:
        experiment_paramset = []  # FIX: was a shared mutable default argument
    bdp_username = getval(run_settings,
                          '%s/bdp_userprofile/username' % RMIT_SCHEMA)
    try:
        mytardis_url = getval(run_settings,
            '%s/input/mytardis/mytardis_platform' % RMIT_SCHEMA)
    except SettingNotFoundException:
        logger.error("mytardis_platform not set")
        return 0
    mytardis_settings = manage.get_platform_settings(
        mytardis_url, bdp_username)
    logger.debug(mytardis_settings)
    curate_data = getval(run_settings,
                         '%s/input/mytardis/curate_data' % RMIT_SCHEMA)
    if curate_data:
        if mytardis_settings['mytardis_host']:
            def _get_exp_name_for_input(path):
                # experiment name is the last path component
                return str(os.sep.join(path.split(os.sep)[-1:]))
            ename = _get_exp_name_for_input(output_location)
            logger.debug("ename=%s" % ename)
            experiment_id = mytardis.create_experiment(
                settings=mytardis_settings,
                exp_id=experiment_id,
                experiment_paramset=experiment_paramset,
                expname=ename)
        else:
            logger.warn("no mytardis host specified")
    else:
        logger.warn('Data curation is off')
    return experiment_id
def process(self, run_settings):
    """Resolve the random seed index: prefer the run-stage value, then
    the hrmc iseed input, falling back to 42."""
    try:
        self.rand_index = int(getval(run_settings,
            '%s/stages/run/rand_index' % RMIT_SCHEMA))
    except SettingNotFoundException:
        try:
            self.rand_index = int(getval(run_settings,
                '%s/input/hrmc/iseed' % RMIT_SCHEMA))
        except SettingNotFoundException as e:  # FIX: was legacy "except X, e"
            self.rand_index = 42
            logger.debug(e)
def create_dataset_for_intermediate_output(self, run_settings, experiment_id,
                                           base_dir, output_url, all_settings,
                                           outputs=None):
    """Create one MyTardis dataset per node output directory for this
    iteration's intermediate output.

    Returns the experiment id, or 0 when no MyTardis host is configured.
    """
    if outputs is None:
        outputs = []  # FIX: was a shared mutable default argument
    logger.debug('self_outpus_curate=%s' % outputs)
    iteration = int(getval(run_settings,
                           '%s/system/id' % self.SCHEMA_PREFIX))
    # FIX: removed redundant nested os.path.join call
    iter_output_dir = os.path.join(base_dir, "output_%s" % iteration)
    output_prefix = '%s://%s@' % (all_settings['scheme'],
                                  all_settings['type'])
    iter_output_dir = "%s%s" % (output_prefix, iter_output_dir)
    (scheme, host, mypath, location, query_settings) = \
        storage.parse_bdpurl(output_url)
    fsys = storage.get_filesystem(output_url)
    node_output_dirnames, _ = fsys.listdir(mypath)
    logger.debug("node_output_dirnames=%s" % node_output_dirnames)
    if not all_settings['mytardis_host']:
        logger.warn("no mytardis host specified")
        return 0
    output_dirs = [os.path.join(iter_output_dir, dir_name)
                   for dir_name in node_output_dirnames]
    for output_dir in output_dirs:
        dataset_paramset = []
        datafile_paramset = []
        dfile_extract_func = {}
        self.load_metadata_builder(run_settings)
        if self.METADATA_BUILDER:
            (continue_loop, dataset_paramset, datafile_paramset,
             dfile_extract_func) = \
                self.METADATA_BUILDER.build_metadata_for_intermediate_output(
                    output_dir, outputs, run_settings=run_settings,
                    storage_settings=all_settings, output_dirs=output_dirs)
            if continue_loop:
                continue
        source_dir_url = get_url_with_credentials(
            all_settings, output_dir, is_relative_path=False)
        logger.debug("source_dir_url=%s" % source_dir_url)
        logger.debug('all_settings_here=%s' % all_settings)
        system_id = int(getval(run_settings,
                               '%s/system/id' % self.SCHEMA_PREFIX))  # TODO Mytardis
        experiment_id = mytardis.create_dataset(
            settings=all_settings,
            source_url=source_dir_url,
            exp_id=experiment_id,
            exp_name=mytardis.get_exp_name_for_intermediate_output,
            dataset_name=mytardis.get_dataset_name_for_output,
            dataset_paramset=dataset_paramset,
            datafile_paramset=datafile_paramset,
            dfile_extract_func=dfile_extract_func)
    # FIX: the original split "return" and "experiment_id" over two
    # lines, which returned None; return the experiment id explicitly.
    return experiment_id
def input_exists(self, run_settings):
    """Return the schema key under which an input location is set,
    or '' when neither known key is present."""
    candidate_keys = (
        'http://rmit.edu.au/schemas/input/location/input/input_location',
        'http://rmit.edu.au/schemas/input/system/input_location',
    )
    for key in candidate_keys:
        try:
            getval(run_settings, key)
            return key
        except SettingNotFoundException:
            pass
    return ""
def get_internal_sweep_map(self, settings, **kwargs):
    """Build the internal sweep map, seeding the random index from the
    run stage, then the hrmc iseed input, then a fixed default of 42."""
    local_settings = settings.copy()
    run_settings = kwargs['run_settings']
    logger.debug('run_settings=%s' % run_settings)
    #fixme remove rand index
    try:
        rand_index = int(getval(run_settings,
            '%s/stages/run/rand_index' % django_settings.SCHEMA_PREFIX))
    except SettingNotFoundException:
        try:
            rand_index = int(getval(run_settings,
                '%s/input/hrmc/iseed' % django_settings.SCHEMA_PREFIX))
        except SettingNotFoundException as e:  # FIX: was legacy "except X, e"
            rand_index = 42
            logger.debug(e)
def is_triggered(self, run_settings):
    """
    Triggered when we now that we have N nodes setup and ready to run.
    input_dir is assumed to be populated.
    """
    try:
        schedule_completed = int(getval(
            run_settings,
            '%s/stages/schedule/schedule_completed' % django_settings.SCHEMA_PREFIX))
        self.all_processes = ast.literal_eval(
            getval(run_settings,
                   '%s/stages/schedule/all_processes' % django_settings.SCHEMA_PREFIX))
    except SettingNotFoundException:  # FIX: legacy "except X, e"; e was unused
        return False
def is_triggered(self, run_settings):
    """Trigger only after convergence and before run_finished is set."""
    try:
        converged = int(getval(run_settings,
            '%s/stages/converge/converged' % django_settings.SCHEMA_PREFIX))
    except (ValueError, SettingNotFoundException):
        return False
    logger.debug("converged=%s" % converged)
    if not converged:
        return False
    # Converged: trigger unless the run has already been flagged finished.
    try:
        run_finished = int(getval(run_settings,
            '%s/stages/destroy/run_finished' % django_settings.SCHEMA_PREFIX))
    except (ValueError, SettingNotFoundException):
        return True
    return not run_finished
def set_create_settings(self, run_settings, local_settings):
    """Copy VM-creation settings into local_settings, defaulting the
    minimum and maximum instance counts to 1 when not configured."""
    update(local_settings, run_settings,
           '%s/stages/create/vm_image' % RMIT_SCHEMA,
           '%s/stages/create/cloud_sleep_interval' % RMIT_SCHEMA,
           '%s/system/contextid' % RMIT_SCHEMA)
    for local_key, setting_name in (
            ('min_count', 'minimum_number_vm_instances'),
            ('max_count', 'number_vm_instances')):
        try:
            local_settings[local_key] = int(getval(
                run_settings,
                '%s/input/system/cloud/%s' % (RMIT_SCHEMA, setting_name)))
        except SettingNotFoundException:
            local_settings[local_key] = 1
def curate_data(self, run_settings, location, experiment_id):
    """Create/update a MyTardis experiment for a vasp sweep run,
    attaching the three standard vasp experiment graphs.

    Only acts when the sweep subdirective is 'vasp'; otherwise returns
    experiment_id unchanged.
    """
    logger.debug("vasp curate_data")  # FIX: was typo "durate_data"
    try:
        subdirective = getval(run_settings,
            '%s/stages/sweep/directive' % SCHEMA_PREFIX)
    except SettingNotFoundException:
        logger.warn("cannot find subdirective name")
        subdirective = ''
    if subdirective == "vasp":
        bdp_username = getval(run_settings,
            '%s/bdp_userprofile/username' % SCHEMA_PREFIX)
        mytardis_url = run_settings[
            'http://rmit.edu.au/schemas/input/mytardis']['mytardis_platform']
        mytardis_settings = manage.get_platform_settings(
            mytardis_url, bdp_username)
        logger.debug(mytardis_settings)

        def _get_exp_name_for_input(path):
            # experiment name is the last two path components
            return str(os.sep.join(path.split(os.sep)[-2:]))

        ename = _get_exp_name_for_input(location)
        experiment_id = mytardis.create_experiment(
            settings=mytardis_settings,
            exp_id=experiment_id,
            expname=ename,
            experiment_paramset=[
                mytardis.create_paramset("remotemake", []),
                mytardis.create_graph_paramset(
                    "expgraph",
                    name="makeexp1",
                    graph_info={"axes": ["num_kp", "energy"],
                                "legends": ["TOTEN"]},
                    value_dict={},
                    value_keys=[["makedset/num_kp", "makedset/toten"]]),
                mytardis.create_graph_paramset(
                    "expgraph",
                    name="makeexp2",
                    graph_info={"axes": ["encut", "energy"],
                                "legends": ["TOTEN"]},
                    value_dict={},
                    value_keys=[["makedset/encut", "makedset/toten"]]),
                mytardis.create_graph_paramset(
                    "expgraph",
                    name="makeexp3",
                    graph_info={"axes": ["num_kp", "encut", "TOTEN"],
                                "legends": ["TOTEN"]},
                    value_dict={},
                    value_keys=[["makedset/num_kp", "makedset/encut",
                                 "makedset/toten"]]),
            ])
    else:
        # FIX: previous message wrongly claimed the subdirective was
        # missing; it was found but is not 'vasp'.
        logger.warn("unsupported subdirective '%s'" % subdirective)
    return experiment_id
def set_destroy_settings(self, run_settings, local_settings):
    """Copy the settings needed to tear down created nodes into
    local_settings, including the BDP username."""
    schema_keys = ('%s/stages/create/cloud_sleep_interval',
                   '%s/system/contextid',
                   '%s/stages/create/created_nodes')
    update(local_settings, run_settings,
           *(key % RMIT_SCHEMA for key in schema_keys))
    local_settings['bdp_username'] = getval(
        run_settings, '%s/bdp_userprofile/username' % RMIT_SCHEMA)
def get_total_templates(self, maps, **kwargs):
    """Return the number of templates for the current iteration:
    the product of the value-range sizes of a template map times the
    number of input directories.

    NOTE(review): total_templates is recomputed on every pass, so only
    the final map in *maps* determines the returned value — preserved
    as found.
    """
    run_settings = kwargs['run_settings']
    output_storage_settings = kwargs['output_storage_settings']
    job_dir = kwargs['job_dir']
    try:
        # FIX: renamed from "id" to avoid shadowing the builtin
        iteration = int(getval(run_settings, '%s/system/id' % RMIT_SCHEMA))
    except (SettingNotFoundException, ValueError) as e:
        logger.debug(e)
        iteration = 0
    iter_inputdir = os.path.join(job_dir, "input_%s" % iteration)
    url_with_pkey = get_url_with_credentials(
        output_storage_settings,
        '%s://%s@%s' % (output_storage_settings['scheme'],
                        output_storage_settings['type'],
                        iter_inputdir),
        is_relative_path=False)
    logger.debug(url_with_pkey)
    input_dirs = list_dirs(url_with_pkey)
    # FIX: initialise so an empty *maps* returns 0 instead of NameError.
    total_templates = 0
    # FIX: loop variable renamed from "iter" (shadowed the builtin).
    for map_index, template_map in enumerate(maps):
        logger.debug("template_map=%s" % template_map)
        map_keys = template_map.keys()
        logger.debug("map_keys %s" % map_keys)
        map_ranges = [list(template_map[x]) for x in map_keys]
        product = 1
        for map_range in map_ranges:
            product = product * len(map_range)
        total_templates = product * len(input_dirs)
        logger.debug("total_templates=%d" % (total_templates))
    return total_templates
def is_triggered(self, run_settings):
    """Trigger once the makefile upload is done and until the make
    program reports success."""
    try:
        upload_makefile_done = int(getval(run_settings,
            '%s/stages/upload_makefile/done' % RMIT_SCHEMA))
    except (ValueError, SettingNotFoundException):
        return False
    if not upload_makefile_done:
        return False
    # Upload finished: trigger unless the program already succeeded.
    try:
        program_success = int(getval(run_settings,
            '%s/stages/make/program_success' % RMIT_SCHEMA))
    except (ValueError, SettingNotFoundException):
        return True
    logger.debug("program_success")
    return not program_success
def _submit_subdirective(platform, run_settings, user, parentcontext):
    """Launch the configured sweep subdirective as a child run context,
    forwarding every schema namespace from run_settings as arguments."""
    try:
        subdirective_name = getval(run_settings,
            '%s/stages/sweep/directive' % RMIT_SCHEMA)
    except SettingNotFoundException:
        logger.warn("cannot find subdirective_name name")
        raise
    directive_args = []
    for schema in get_schema_namespaces(run_settings):
        keys = getvals(run_settings, schema)
        logger.debug("keys=%s" % keys)
        # one entry per namespace: [schema, (key, value), ...]
        entry = [(k, v) for k, v in keys.items()]
        entry.insert(0, schema)
        directive_args.append(entry)
    directive_args.insert(0, '')
    directive_args = [directive_args]
    logger.debug("directive_args=%s" % pformat(directive_args))
    logger.debug('subdirective_name=%s' % subdirective_name)
    (task_run_settings, command_args, run_context) = \
        make_runcontext_for_directive(
            platform, subdirective_name, directive_args, {}, user,
            parent=parentcontext)
    logger.debug("sweep process done")
def is_triggered(self, run_settings):
    """
    Triggered when we now that we have N nodes setup and ready to run.
    input_dir is assumed to be populated.
    """
    try:
        schedule_completed = int(getval(run_settings,
            '%s/stages/schedule/schedule_completed' % RMIT_SCHEMA))
        self.all_processes = ast.literal_eval(getval(run_settings,
            '%s/stages/schedule/all_processes' % RMIT_SCHEMA))
    except SettingNotFoundException:  # FIX: legacy "except X, e"; e was unused
        return False
def start_schedule(schedule_class, run_settings, local_settings):
    """Compute the run map, derive the total process count, and kick off
    a round-robin schedule across the available nodes."""
    parent_stage = schedule_class.import_parent_stage(run_settings)
    # FIX: renamed local from "map" (shadowed the builtin).
    sweep_map = parent_stage.get_internal_sweep_map(
        local_settings, run_settings=run_settings)
    # The parent stage may return the map itself, or a tuple whose first
    # element is the map.  (FIX: dropped the original no-op
    # "isinstance(map, tuple)" whose result was discarded.)
    try:
        run_map = sweep_map[0]
    except TypeError:
        run_map = sweep_map
    logger.debug('map=%s' % run_map)
    output_storage_settings = schedule_class.get_platform_settings(
        run_settings,
        '%s/platform/storage/output' % django_settings.SCHEMA_PREFIX)
    offset = getval(run_settings,
        '%s/platform/storage/output/offset' % django_settings.SCHEMA_PREFIX)
    job_dir = get_job_dir(output_storage_settings, offset)
    schedule_class.total_processes = \
        parent_stage.get_total_procs_per_iteration(
            [run_map],
            run_settings=run_settings,
            output_storage_settings=output_storage_settings,
            job_dir=job_dir)
    logger.debug('total_processes=%d' % schedule_class.total_processes)
    schedule_class.current_processes = []
    relative_path_suffix = \
        schedule_class.get_relative_output_path(local_settings)
    schedule_class.schedule_index, schedule_class.current_processes = \
        start_round_robin_schedule(
            schedule_class.nodes,
            schedule_class.total_processes,
            schedule_class.schedule_index,
            local_settings,
            relative_path_suffix)
    schedule_class.all_processes = update_lookup_table(
        schedule_class.all_processes,
        new_processes=schedule_class.current_processes)
    logger.debug('all_processes=%s' % schedule_class.all_processes)
def addMessage(run_settings, level, msg):
    """Queue a '<level>,<msg>' context message for asynchronous delivery
    via a celery task.  Silently returns when no valid context id is
    available."""
    try:
        context_id = getval(run_settings,
                            '%s/system/contextid' % RMIT_SCHEMA)
    except SettingNotFoundException:
        logger.error("unable to load contextid from run_settings")
        logger.error(pformat(run_settings))
        return
    logger.debug("context_id=%s" % context_id)
    if not context_id:
        logger.error("invalid context_id")
        return
    mess = '%s,%s' % (level, msg)
    logger.debug("mess=%s" % mess)
    # Cannot write ContextMessage in the same process as tasks.py: it
    # holds a lock on all tables, so messages would only appear at the
    # end of the corestages process.  Delegating to a celery task lets
    # another worker record the message ASAP.
    # FIXME: this is circular import at global level
    from chiminey.smartconnectorscheduler import tasks
    tasks.context_message.delay(context_id, mess)
def trigger_schedule(self, run_settings):
    """Return False when every process has already been scheduled;
    otherwise fall through (None) so scheduling continues."""
    try:
        self.total_scheduled_procs = getval(run_settings,
            '%s/stages/schedule/total_scheduled_procs' % RMIT_SCHEMA)
    except SettingNotFoundException:
        self.total_scheduled_procs = 0
    try:
        total_procs = int(getval(run_settings,
            '%s/stages/schedule/total_processes' % RMIT_SCHEMA))
        if total_procs:
            if total_procs == self.total_scheduled_procs:
                return False
    except SettingNotFoundException as e:  # FIX: was legacy "except X, e"
        logger.debug(e)
def output(self, run_settings):
    """Record convergence results in run_settings and advance the
    iteration id.

    Non-convergence resets schedule/execute state so the iteration
    corestages trigger again; convergence records the criterion so the
    post-iteration stages can run.
    """
    setval(run_settings,
           '%s/input/mytardis/experiment_id' % django_settings.SCHEMA_PREFIX,
           str(self.experiment_id))
    if not self.done_iterating:
        # Not converged yet: clear scheduling state to trigger the first
        # stage of the next iteration.
        logger.debug("nonconvergence")
        setvals(run_settings, {
            '%s/stages/schedule/scheduled_nodes' % django_settings.SCHEMA_PREFIX: '[]',
            '%s/stages/execute/executed_procs' % django_settings.SCHEMA_PREFIX: '[]',
            '%s/stages/schedule/current_processes' % django_settings.SCHEMA_PREFIX: '[]',
            '%s/stages/schedule/total_scheduled_procs' % django_settings.SCHEMA_PREFIX: 0,
            '%s/stages/schedule/schedule_completed' % django_settings.SCHEMA_PREFIX: 0,
            '%s/stages/schedule/schedule_started' % django_settings.SCHEMA_PREFIX: 0
        })
        logger.debug('scheduled_nodes=%s' % getval(run_settings,
            '%s/stages/schedule/scheduled_nodes' % django_settings.SCHEMA_PREFIX))
        # Drop per-iteration run state; absence of either key is fine.
        for stale_key in (
                '%s/stages/run/runs_left' % django_settings.SCHEMA_PREFIX,
                '%s/stages/run/error_nodes' % django_settings.SCHEMA_PREFIX):
            try:
                delkey(run_settings, stale_key)
            except SettingNotFoundException:
                pass
        setval(run_settings,
               '%s/stages/converge/converged' % django_settings.SCHEMA_PREFIX,
               0)
    else:
        # Converged: flag completion so stages outside converge trigger,
        # and record the criterion that was met.
        logger.debug("convergence")
        setval(run_settings,
               '%s/stages/converge/converged' % django_settings.SCHEMA_PREFIX,
               1)
        setval(run_settings,
               '%s/stages/converge/criterion' % django_settings.SCHEMA_PREFIX,
               unicode(self.criterion))
    try:
        delkey(run_settings,
               '%s/stages/transform/transformed' % django_settings.SCHEMA_PREFIX)
    except SettingNotFoundException:
        pass
    self.id += 1
    setval(run_settings,
           '%s/system/id' % django_settings.SCHEMA_PREFIX, self.id)
    return run_settings
def is_triggered(self, run_settings):
    """
    Triggered when we now that we have N nodes setup and ready to run.
    input_dir is assumed to be populated.
    """
    try:
        schedule_completed = int(
            getval(run_settings,
                   '%s/stages/schedule/schedule_completed' % django_settings.SCHEMA_PREFIX))
        self.all_processes = ast.literal_eval(
            getval(run_settings,
                   '%s/stages/schedule/all_processes' % django_settings.SCHEMA_PREFIX))
    except SettingNotFoundException:  # FIX: legacy "except X, e"; e was unused
        return False
def get_process_output_path(self, run_settings, process_id, local_settings):
    """Build the output path for *process_id* on the computation
    platform (cloud payload directory layout)."""
    computation_platform = self.get_platform_settings(
        run_settings, 'http://rmit.edu.au/schemas/platform/computation')
    root_path = computation_platform['root_path']
    relative_path = self.get_relative_output_path(local_settings)
    payload_dirname = getval(
        run_settings,
        'http://rmit.edu.au/schemas/stages/run/payload_cloud_dirname')
    return os.path.join(root_path, relative_path, str(process_id),
                        payload_dirname)
def get_process_output_path(self, run_settings, process_id, local_settings):
    """Build the output path for *process_id* on the computation
    platform (generic process output directory layout)."""
    computation_platform = self.get_platform_settings(
        run_settings,
        '%s/platform/computation' % django_settings.SCHEMA_PREFIX)
    root_path = computation_platform['root_path']
    relative_path = self.get_relative_output_path(local_settings)
    output_dirname = getval(
        run_settings,
        '%s/stages/setup/process_output_dirname' % django_settings.SCHEMA_PREFIX)
    return os.path.join(root_path, relative_path, str(process_id),
                        output_dirname)
def get_run_map(self, settings, **kwargs):
    """Build the run map, seeding the random index from the run stage,
    then the hrmc iseed input, then a fixed default of 42."""
    local_settings = settings.copy()
    run_settings = kwargs['run_settings']
    logger.debug('run_settings=%s' % run_settings)
    #fixme remove rand index
    try:
        rand_index = int(
            getval(run_settings,
                   '%s/stages/run/rand_index' % self.SCHEMA_PREFIX))
    except SettingNotFoundException:
        try:
            rand_index = int(
                getval(run_settings,
                       '%s/input/hrmc/iseed' % self.SCHEMA_PREFIX))
        except SettingNotFoundException as e:  # FIX: was legacy "except X, e"
            rand_index = 42
            logger.debug(e)
def schedule_task(schedule_class, run_settings, local_settings):
    """Dispatch to scheduling or rescheduling of processes on the
    registered bootstrapped VMs."""
    schedule_class.nodes = get_registered_vms(
        local_settings, node_type='bootstrapped_nodes')
    try:
        maximum_retry = getval(run_settings,
            '%s/input/reliability/maximum_retry' % RMIT_SCHEMA)
    except SettingNotFoundException:
        maximum_retry = 0
    local_settings['maximum_retry'] = maximum_retry
    try:
        # FIX: renamed from "id" to avoid shadowing the builtin
        context_id = int(getval(run_settings,
                                '%s/system/id' % RMIT_SCHEMA))
    except (SettingNotFoundException, ValueError):
        context_id = 0
    if schedule_class.procs_2b_rescheduled:
        messages.info(run_settings,
                      '%d: rescheduling failed processes' % (context_id))
        start_reschedule(schedule_class, run_settings, local_settings)
    else:
        messages.info(run_settings, '%d: scheduling processes' % context_id)
        start_schedule(schedule_class, run_settings, local_settings)
def set_execute_settings(self, run_settings, local_settings):
    """Populate local_settings with the settings the execute stage
    needs, including the curation flag and BDP username."""
    self.set_domain_settings(run_settings, local_settings)
    schema_keys = ('%s/stages/setup/payload_destination',
                   '%s/stages/setup/filename_for_PIDs',
                   '%s/stages/run/payload_cloud_dirname',
                   '%s/stages/run/compile_file',
                   '%s/stages/run/retry_attempts',
                   '%s/system/contextid',
                   '%s/system/random_numbers',
                   '%s/system/id')
    update(local_settings, run_settings,
           *(key % RMIT_SCHEMA for key in schema_keys))
    try:
        local_settings['curate_data'] = getval(
            run_settings, '%s/input/mytardis/curate_data' % RMIT_SCHEMA)
    except SettingNotFoundException:
        local_settings['curate_data'] = 0
    local_settings['bdp_username'] = getval(
        run_settings, '%s/bdp_userprofile/username' % RMIT_SCHEMA)
def is_triggered(self, run_settings):
    """Trigger only after convergence and before run_finished is set."""
    try:
        converged = int(
            getval(run_settings,
                   '%s/stages/converge/converged' % RMIT_SCHEMA))
    except (ValueError, SettingNotFoundException):
        return False
    logger.debug("converged=%s" % converged)
    if not converged:
        return False
    # Converged: trigger unless the run already finished.
    try:
        run_finished = int(
            getval(run_settings,
                   '%s/stages/destroy/run_finished' % RMIT_SCHEMA))
    except (ValueError, SettingNotFoundException):
        return True
    return not run_finished
def process(self, run_settings):
    """Wait for the remote make job to finish and fetch its output,
    counting down the remaining runs."""
    self.experiment_id = 0
    local_settings = setup_settings(run_settings)
    self.experiment_id = local_settings['experiment_id']
    messages.info(run_settings, "1: waiting for completion")
    logger.debug("settings=%s" % local_settings)
    try:
        self.runs_left = ast.literal_eval(getval(run_settings,
            '%s/stages/make/runs_left' % RMIT_SCHEMA))
    except (ValueError, SettingNotFoundException):
        self.runs_left = []

    def _get_dest_bdp_url(settings):
        # destination is <payload_destination>/<contextid> on host "nci"
        return "%s@%s" % (
            "nci",
            os.path.join(settings['payload_destination'],
                         str(settings['contextid'])))

    dest_url = _get_dest_bdp_url(local_settings)
    computation_platform_url = local_settings['comp_platform_url']
    bdp_username = local_settings['bdp_username']
    comp_pltf_settings = manage.get_platform_settings(
        computation_platform_url, bdp_username)
    local_settings.update(comp_pltf_settings)
    encoded_d_url = storage.get_url_with_credentials(
        local_settings, dest_url,
        is_relative_path=True, ip_address=local_settings['host'])
    (scheme, host, mypath, location, query_settings) = \
        storage.parse_bdpurl(encoded_d_url)
    if self.runs_left:
        job_finished = self._job_finished(
            settings=local_settings, remote_path=dest_url)
        if not job_finished:
            return
        self._get_output(local_settings, dest_url)
        # NOTE(review): runs_left comes from literal_eval and defaults
        # to a list; "-= 1" presumably expects an int here — confirm.
        self.runs_left -= 1
    if self.runs_left <= 0:
        messages.success(run_settings, "%s: finished" % (1))
    logger.debug("processing finished")
def copy_to_scratch_space(self, run_settings, local_settings, result_offset):
    """Copy the input directory tree into the output scratch space under
    *result_offset*."""
    prefix = django_settings.SCHEMA_PREFIX
    bdp_username = run_settings['%s/bdp_userprofile' % prefix]['username']
    output_storage_url = run_settings[
        '%s/platform/storage/output' % prefix]['platform_url']
    output_storage_settings = manage.get_platform_settings(
        output_storage_url, bdp_username)
    run_settings['%s/platform/storage/output' % prefix]['offset'] = \
        self.output_loc_offset
    offset = run_settings['%s/platform/storage/output' % prefix]['offset']
    self.job_dir = manage.get_job_dir(output_storage_settings, offset)
    iter_inputdir = os.path.join(self.job_dir, result_offset)
    logger.debug("iter_inputdir=%s" % iter_inputdir)
    input_storage_settings = self.get_platform_settings(
        run_settings, '%s/platform/storage/input' % prefix)
    # The input location may live under any of three schema keys;
    # probe them in order of preference.
    try:
        input_location = getval(run_settings,
                                prefix + '/input/system/input_location')
    except SettingNotFoundException:
        try:
            input_location = getval(run_settings,
                                    prefix + '/input/location/input_location')
        except SettingNotFoundException:
            # FIX: was a bare "except:", which masked unrelated errors
            input_location = getval(
                run_settings,
                prefix + '/input/location/input/input_location')
    logger.debug("input_location=%s" % input_location)
    #todo: input location will evenatually be replaced by the scratch space that was used by the sweep
    #todo: the sweep will indicate the location of the scratch space in the run_settings
    #todo: add scheme (ssh) to inputlocation
    #source_url = get_url_with_credentials(local_settings, input_location)
    input_offset = run_settings[
        '%s/platform/storage/input' % prefix]['offset']
    input_url = "%s://%s@%s/%s" % (input_storage_settings['scheme'],
                                   input_storage_settings['type'],
                                   input_storage_settings['host'],
                                   input_offset)
    source_url = get_url_with_credentials(
        input_storage_settings, input_url, is_relative_path=False)
    logger.debug("source_url=%s" % source_url)
    destination_url = get_url_with_credentials(
        output_storage_settings,
        '%s://%s@%s' % (output_storage_settings['scheme'],
                        output_storage_settings['type'],
                        iter_inputdir),
        is_relative_path=False)
    logger.debug("destination_url=%s" % destination_url)
    storage.copy_directories(source_url, destination_url)
def set_bootstrap_settings(run_settings, local_settings):
    """Copy bootstrap-stage settings and the BDP username into
    local_settings."""
    #logger.debug('in=%s' % run_settings)
    schema_keys = ('%s/stages/setup/payload_source',
                   '%s/stages/setup/payload_destination',
                   '%s/stages/create/created_nodes',
                   '%s/system/contextid')
    update(local_settings, run_settings,
           *(key % RMIT_SCHEMA for key in schema_keys))
    local_settings['bdp_username'] = getval(
        run_settings, '%s/bdp_userprofile/username' % RMIT_SCHEMA)
def process(self, run_settings):
    """Wait for the remote make job to finish and fetch its output,
    counting down the remaining runs."""
    self.experiment_id = 0
    local_settings = setup_settings(run_settings)
    self.experiment_id = local_settings['experiment_id']
    messages.info(run_settings, "1: waiting for completion")
    logger.debug("settings=%s" % local_settings)
    try:
        self.runs_left = ast.literal_eval(
            getval(run_settings, '%s/stages/make/runs_left' % RMIT_SCHEMA))
    except (ValueError, SettingNotFoundException):
        self.runs_left = []

    def _get_dest_bdp_url(settings):
        # destination is <payload_destination>/<contextid> on host "nci"
        return "%s@%s" % ("nci", os.path.join(
            settings['payload_destination'],
            str(settings['contextid'])))

    dest_url = _get_dest_bdp_url(local_settings)
    computation_platform_url = local_settings['comp_platform_url']
    bdp_username = local_settings['bdp_username']
    comp_pltf_settings = manage.get_platform_settings(
        computation_platform_url, bdp_username)
    local_settings.update(comp_pltf_settings)
    encoded_d_url = storage.get_url_with_credentials(
        local_settings, dest_url,
        is_relative_path=True, ip_address=local_settings['host'])
    (scheme, host, mypath, location, query_settings) = \
        storage.parse_bdpurl(encoded_d_url)
    if self.runs_left:
        job_finished = self._job_finished(settings=local_settings,
                                          remote_path=dest_url)
        if not job_finished:
            return
        self._get_output(local_settings, dest_url)
        # NOTE(review): runs_left comes from literal_eval and defaults
        # to a list; "-= 1" presumably expects an int here — confirm.
        self.runs_left -= 1
    if self.runs_left <= 0:
        messages.success(run_settings, "%s: finished" % (1))
    logger.debug("processing finished")
def schedule_task(schedule_class, run_settings, local_settings):
    """Run either a fresh schedule or a reschedule pass over bootstrapped nodes."""
    schedule_class.nodes = schedule_class.bootstrapped_nodes
    try:
        maximum_retry = getval(
            run_settings,
            '%s/input/reliability/maximum_retry' % django_settings.SCHEMA_PREFIX)
    except SettingNotFoundException:
        maximum_retry = 0
    local_settings['maximum_retry'] = maximum_retry

    try:
        context_id = int(getval(
            run_settings, '%s/system/id' % django_settings.SCHEMA_PREFIX))
    except (SettingNotFoundException, ValueError):
        context_id = 0

    if schedule_class.procs_2b_rescheduled:
        messages.info(run_settings,
                      '%d: rescheduling failed processes' % int(context_id))
        start_reschedule(schedule_class, run_settings, local_settings)
    else:
        messages.info(run_settings,
                      '%d: Scheduling processes' % int(context_id))
        start_schedule(schedule_class, run_settings, local_settings)
def set_schedule_settings(run_settings, local_settings):
    """Copy the schedule-stage keys from run_settings into local_settings."""
    update(
        local_settings, run_settings,
        '%s/system/contextid' % RMIT_SCHEMA,
        '%s/stages/setup/payload_destination' % RMIT_SCHEMA,
        '%s/stages/setup/filename_for_PIDs' % RMIT_SCHEMA,
        '%s/stages/setup/payload_name' % RMIT_SCHEMA,
        '%s/stages/bootstrap/bootstrapped_nodes' % RMIT_SCHEMA)
    local_settings['bdp_username'] = getval(
        run_settings, '%s/bdp_userprofile/username' % RMIT_SCHEMA)
def set_schedule_settings(run_settings, local_settings):
    """Load schedule-stage settings plus the BDP username into local_settings."""
    schedule_keys = (
        '%s/system/contextid',
        '%s/stages/setup/payload_destination',
        '%s/stages/setup/filename_for_PIDs',
        '%s/stages/setup/payload_name',
        '%s/stages/bootstrap/bootstrapped_nodes',
    )
    update(local_settings, run_settings,
           *[k % RMIT_SCHEMA for k in schedule_keys])
    local_settings['bdp_username'] = getval(
        run_settings, '%s/bdp_userprofile/username' % RMIT_SCHEMA)
def set_schedule_settings(run_settings, local_settings):
    """Copy schedule-stage settings into local_settings and derive the
    next non-cloud process id from the system id (1 when unavailable)."""
    prefix = django_settings.SCHEMA_PREFIX
    suffixes = (
        'system/contextid',
        'stages/setup/payload_destination',
        'stages/setup/filename_for_PIDs',
        'stages/setup/payload_name',
        'stages/bootstrap/bootstrapped_nodes',
        'stages/setup/payload_source',
        'stages/setup/process_output_dirname',
        'stages/setup/smart_connector_input',
    )
    update(local_settings, run_settings,
           *['%s/%s' % (prefix, s) for s in suffixes])
    local_settings['bdp_username'] = getval(
        run_settings, '%s/bdp_userprofile/username' % prefix)
    try:
        local_settings['non_cloud_proc_id'] = 1 + int(
            getval(run_settings, '%s/system/id' % prefix))
    except (SettingNotFoundException, ValueError):
        local_settings['non_cloud_proc_id'] = 1
def trigger_schedule(self, run_settings):
    """Decide whether the schedule stage still has work to do.

    Returns False when every process has already been scheduled; otherwise
    falls through (returning None, also falsy) so the caller's other checks
    apply.
    """
    try:
        # Fix: cast to int.  The stored setting may round-trip as a string,
        # and a str would never compare equal to the int total_procs below,
        # so the "all scheduled" short-circuit could silently never fire.
        self.total_scheduled_procs = int(getval(
            run_settings,
            '%s/stages/schedule/total_scheduled_procs' % django_settings.SCHEMA_PREFIX))
    except (SettingNotFoundException, ValueError):
        self.total_scheduled_procs = 0
    try:
        total_procs = int(
            getval(
                run_settings,
                '%s/stages/schedule/total_processes' % django_settings.SCHEMA_PREFIX))
        if total_procs and total_procs == self.total_scheduled_procs:
            return False
    except (SettingNotFoundException, ValueError) as e:
        # ValueError added: a non-numeric total_processes previously escaped.
        logger.debug(e)
def set_bootstrap_settings(self, run_settings, local_settings):
    """Extend the base bootstrap settings; when a payload source is
    configured, also apply the bootstrap module's settings.

    Fix: the original swallowed SettingNotFoundException with a bare
    ``pass``, leaving ``payload_source`` unbound and raising NameError on
    the following ``if`` — now it defaults to None (falsy) instead.
    """
    super(CloudStrategy, self).set_bootstrap_settings(run_settings, local_settings)
    try:
        payload_source = getval(
            run_settings, '%s/stages/setup/payload_source' % RMIT_SCHEMA)
    except SettingNotFoundException:
        payload_source = None
    if payload_source:
        bootstrap.set_bootstrap_settings(run_settings, local_settings)
def is_triggered(self, run_settings):
    """Trigger unless the sweep stage has already recorded completion."""
    logger.debug('run_settings=%s' % run_settings)
    try:
        done = int(getval(
            run_settings, '%s/stages/sweep/sweep_done' % RMIT_SCHEMA))
    except (ValueError, SettingNotFoundException):
        # Missing or malformed flag means the sweep has not run yet.
        return True
    return not done
def get_internal_sweep_map(self, settings, **kwargs):
    """Read and parse the charlie schema's 'internal_sweep_map' input.

    NOTE(review): this function never returns the parsed ``map`` (it always
    returns None) and ``rand_index`` is assigned but unused — both look like
    leftovers or a missing ``return``; confirm against callers.
    """
    run_settings = kwargs['run_settings']
    rand_index = 42  # NOTE(review): unused
    if '%s/input/charlie' % django_settings.SCHEMA_PREFIX in run_settings:
        try:
            internal_sweep_map = getval(
                run_settings,
                '%s/input/charlie/internal_sweep_map' % django_settings.SCHEMA_PREFIX)
            logger.debug("internal_sweep_map=%s" % internal_sweep_map)
        except ValueError:
            # NOTE(review): if this branch runs, internal_sweep_map is
            # unbound below, so dict(ast.literal_eval(...)) raises NameError.
            logger.error(
                "cannot convert %s to internal_sweep_map" % getval(
                    run_settings,
                    '%s/input/charlie/internal_sweep_map' % django_settings.SCHEMA_PREFIX))
        try:
            # `map` shadows the builtin; parses the setting as a dict literal.
            map = dict(ast.literal_eval(internal_sweep_map))
        except ValueError, e:
            logger.error(e)
def is_triggered(self, run_settings):
    """Trigger scheduling only when VMs are created, bootstrapped and running.

    Returns False whenever any prerequisite setting is missing, malformed
    or empty.
    NOTE(review): there is no explicit ``return True`` on the success path,
    so a fully satisfied run returns None (falsy) — confirm callers treat
    the fall-through as intended.
    """
    # 1. At least one created node must be in the 'running' state.
    try:
        self.created_nodes = ast.literal_eval(
            getval(
                run_settings,
                '%s/stages/create/created_nodes' % django_settings.SCHEMA_PREFIX))
        # x[3] is presumably the node-state field — TODO confirm tuple layout
        running_created_nodes = [
            x for x in self.created_nodes if str(x[3]) == 'running'
        ]
        logger.debug('running_created_nodes=%s' % running_created_nodes)
        if len(running_created_nodes) == 0:
            return False
    except (SettingNotFoundException, ValueError) as e:
        logger.debug(e)
        return False
    # 2. The bootstrap stage must have completed.
    try:
        bootstrap_done = int(
            getval(
                run_settings,
                '%s/stages/bootstrap/bootstrap_done' % django_settings.SCHEMA_PREFIX))
        if not bootstrap_done:
            return False
    except (SettingNotFoundException, ValueError) as e:
        logger.debug(e)
        return False
    # 3. There must be at least one bootstrapped node.
    try:
        self.bootstrapped_nodes = ast.literal_eval(
            getval(
                run_settings,
                '%s/stages/bootstrap/bootstrapped_nodes' % django_settings.SCHEMA_PREFIX))
        if len(self.bootstrapped_nodes) == 0:
            return False
    except (SettingNotFoundException, ValueError) as e:
        # NOTE(review): unlike the branches above, this swallows the error
        # without logging it.
        return False
    # 4. Load any processes flagged for rescheduling (optional setting).
    try:
        reschedule_str = getval(
            run_settings,
            '%s/stages/schedule/procs_2b_rescheduled' % django_settings.SCHEMA_PREFIX)
        self.procs_2b_rescheduled = ast.literal_eval(reschedule_str)
    except SettingNotFoundException, e:
        # FIXME: when is procs_2b_rescheduled set?
        logger.debug(e)
        self.procs_2b_rescheduled = []
def setup_input(self, run_settings):
    """Resolve the input storage platform name/offset and record them
    under the platform/storage/input section of run_settings.

    The input location is searched in three schema locations, in order;
    the last lookup is allowed to raise if nothing is found.
    """
    self.input_platform_name = ''
    self.input_platform_offset = ''
    if self.input_exists(run_settings):
        try:
            # Already resolved on a previous pass?
            run_settings['%s/platform/storage/input' % django_settings.SCHEMA_PREFIX]
        except KeyError:
            _missing = object()
            bdp_url = _missing
            for suffix in ('/input/system/input_location',
                           '/input/location/input_location'):
                try:
                    bdp_url = getval(
                        run_settings, django_settings.SCHEMA_PREFIX + suffix)
                    break
                except SettingNotFoundException:
                    pass
            if bdp_url is _missing:
                # Final fallback: let SettingNotFoundException propagate.
                bdp_url = getval(
                    run_settings,
                    django_settings.SCHEMA_PREFIX + '/input/location/input/input_location')
            self.input_platform_name, self.input_platform_offset = \
                self.break_bdp_url(bdp_url)
            storage_key = django_settings.SCHEMA_PREFIX + '/platform/storage/input'
            run_settings[storage_key] = {}
            run_settings[storage_key]['platform_url'] = self.input_platform_name
            run_settings[storage_key]['offset'] = self.input_platform_offset
def is_triggered(self, run_settings):
    """Trigger until the configure stage records completion."""
    try:
        flag = int(getval(
            run_settings,
            '%s/stages/configure/configure_done' % django_settings.SCHEMA_PREFIX))
    except (SettingNotFoundException, ValueError):
        # Absent or malformed flag: configure has not finished yet.
        return True
    return not flag
def process(self, run_settings):
    """Perform the stage operation: resolve the input storage location,
    then upload the variation inputs and the payload for each variation."""
    username = getval(
        run_settings, '%s/bdp_userprofile/username' % RMIT_SCHEMA)
    logger.debug("bdp_username=%s" % username)

    storage_url = getval(
        run_settings,
        '%s/platform/storage/input/platform_url' % RMIT_SCHEMA)
    logger.debug("input_storage_url=%s" % storage_url)
    storage_settings = manage.get_platform_settings(storage_url, username)
    logger.debug("input_storage_settings=%s" % pformat(storage_settings))

    offset = getval(
        run_settings, '%s/platform/storage/input/offset' % RMIT_SCHEMA)
    logger.debug("input_offset=%s" % pformat(offset))

    # Build the bdp url of the 'initial' input directory.
    prefix = '%s://%s@' % (storage_settings['scheme'],
                           storage_settings['type'])
    map_initial_location = "%s/%s/initial" % (prefix, offset)
    logger.debug("map_initial_location=%s" % map_initial_location)

    local_settings = setup_settings(run_settings)
    logger.debug("local_settings=%s" % local_settings)

    values_map = _load_values_map(local_settings, map_initial_location)
    logger.debug("values_map=%s" % values_map)
    _upload_variations_inputs(
        local_settings, map_initial_location, values_map)
    _upload_payload(
        local_settings, local_settings['payload_source'], values_map)
    messages.info(run_settings, "1: upload done")
def input_valid(self, settings_to_test):
    """
    Return a tuple, where the first element is True if settings_to_test
    are syntactically and semantically valid for this stage. Otherwise,
    return False with the second element describing the problem.
    """
    problems = []
    try:
        int(getval(settings_to_test,
                   '%s/input/hrmc/max_iteration' % RMIT_SCHEMA))
    except (ValueError, SettingNotFoundException):
        problems.append("Cannot load max_iteration")
    try:
        float(getval(settings_to_test,
                     '%s/input/hrmc/error_threshold' % RMIT_SCHEMA))
    except (ValueError, SettingNotFoundException):
        problems.append("Cannot load error threshold")
    if problems:
        return (False, '. '.join(problems))
    return (True, "ok")
def schedule_task(schedule_class, run_settings, local_settings):
    """Schedule (or reschedule) processes across the registered bootstrapped VMs."""
    schedule_class.nodes = get_registered_vms(
        local_settings, node_type='bootstrapped_nodes')
    try:
        retries = getval(
            run_settings,
            '%s/input/reliability/maximum_retry' % RMIT_SCHEMA)
    except SettingNotFoundException:
        retries = 0
    local_settings['maximum_retry'] = retries
    # Dispatch: failed processes get a reschedule pass, otherwise a fresh schedule.
    action = (start_reschedule if schedule_class.procs_2b_rescheduled
              else start_schedule)
    action(schedule_class, run_settings, local_settings)
def trigger_reschedule(self, run_settings):
    """Decide whether rescheduling still has work to do.

    Returns False when every flagged process has already been rescheduled;
    otherwise falls through (None, also falsy).
    """
    try:
        # Fix: cast to int.  The stored count may round-trip as a string,
        # which would never compare equal to the int len() below, so the
        # "all rescheduled" short-circuit could silently never fire.
        self.total_rescheduled_procs = int(getval(
            run_settings,
            '%s/stages/schedule/total_rescheduled_procs' % django_settings.SCHEMA_PREFIX))
    except (SettingNotFoundException, ValueError):
        self.total_rescheduled_procs = 0
    self.total_procs_2b_rescheduled = len(self.procs_2b_rescheduled)
    if self.total_procs_2b_rescheduled == self.total_rescheduled_procs:
        return False
def is_triggered(self, run_settings):
    """Return False when processes are flagged for rescheduling; otherwise
    fall through (None) so other trigger conditions apply."""
    try:
        raw = getval(
            run_settings,
            '%s/stages/schedule/procs_2b_rescheduled' % RMIT_SCHEMA)
    except SettingNotFoundException as e:
        logger.debug(e)
    else:
        self.procs_2b_rescheduled = ast.literal_eval(raw)
        logger.debug('self.procs_2b_rescheduled=%s'
                     % self.procs_2b_rescheduled)
        if self.procs_2b_rescheduled:
            return False
def post_mytardis_exp(run_settings, experiment_id, output_location,
                      experiment_paramset=None):
    """Create or update a MyTardis experiment for this run.

    Returns the (possibly new) experiment id, or 0 when no MyTardis
    platform is configured.  Curation is skipped when curate_data is off
    or no MyTardis host is set.
    """
    # Fix: mutable default argument ([]) replaced with the None sentinel;
    # callers see identical behaviour but no list is shared across calls.
    if experiment_paramset is None:
        experiment_paramset = []
    # TODO: move into mytardis package?
    bdp_username = getval(
        run_settings,
        '%s/bdp_userprofile/username' % django_settings.SCHEMA_PREFIX)
    try:
        mytardis_url = getval(
            run_settings,
            '%s/input/mytardis/mytardis_platform' % django_settings.SCHEMA_PREFIX)
    except SettingNotFoundException:
        logger.error("mytardis_platform not set")
        return 0
    mytardis_settings = manage.get_platform_settings(mytardis_url, bdp_username)
    logger.debug(mytardis_settings)
    curate_data = getval(
        run_settings,
        '%s/input/mytardis/curate_data' % django_settings.SCHEMA_PREFIX)
    if curate_data:
        if mytardis_settings['mytardis_host']:

            def _get_exp_name_for_input(path):
                # Experiment name is the last path component.
                return str(os.sep.join(path.split(os.sep)[-1:]))

            ename = _get_exp_name_for_input(output_location)
            logger.debug("ename=%s" % ename)
            experiment_id = mytardis.create_experiment(
                settings=mytardis_settings,
                exp_id=experiment_id,
                experiment_paramset=experiment_paramset,
                expname=ename)
        else:
            logger.warn("no mytardis host specified")
    else:
        logger.warn('Data curation is off')
    return experiment_id
def process(self, run_settings):
    """Set up output/input/computation platforms, scratch space, and
    optionally curate the run to MyTardis."""
    logger.debug('run_settings=%s' % run_settings)
    # Resolve the three platform sections before anything else.
    self.setup_output(run_settings)
    self.setup_input(run_settings)
    self.setup_computation(run_settings)
    messages.info(run_settings, "0: Setting up computation")
    local_settings = getvals(run_settings, models.UserProfile.PROFILE_SCHEMA_NS)
    logger.debug("settings=%s" % pformat(run_settings))
    local_settings['bdp_username'] = getval(run_settings, '%s/bdp_userprofile/username' % django_settings.SCHEMA_PREFIX)
    logger.debug('local_settings=%s' % local_settings)
    self.setup_scratchspace(run_settings)
    # Offset computed by the setup_* calls above.
    output_location = self.output_loc_offset
    try:
        self.experiment_id = int(getval(run_settings, '%s/input/mytardis/experiment_id' % django_settings.SCHEMA_PREFIX))
    except KeyError:
        # NOTE(review): getval elsewhere in this file raises
        # SettingNotFoundException, which is NOT caught here — confirm
        # KeyError is really what getval raises for a missing key.
        self.experiment_id = 0
    except ValueError:
        self.experiment_id = 0
    try:
        curate_data = getval(run_settings, '%s/input/mytardis/curate_data' % django_settings.SCHEMA_PREFIX)
    except SettingNotFoundException:
        curate_data = False
    if curate_data:
        try:
            # Late import of the MyTardis hook so missing configuration
            # degrades to a logged error rather than a crash.
            mytardis_platform = jobs.safe_import('chiminey.platform.mytardis.MyTardisPlatform', [], {})
            self.experiment_id = mytardis_platform.create_experiment(run_settings, output_location, self.experiment_id)
        except ImproperlyConfigured as e:
            logger.error("Cannot load mytardis platform hook %s" % e)
def is_triggered(self, run_settings):
    """Trigger while runs remain and the make stage reports it is running.

    TODO: should check program_success?
    """
    try:
        remaining = ast.literal_eval(
            getval(run_settings, '%s/stages/make/runs_left' % RMIT_SCHEMA))
    except (ValueError, SettingNotFoundException):
        return False
    if not remaining:
        # All runs finished.
        return False
    try:
        return getval(run_settings, '%s/stages/make/running' % RMIT_SCHEMA)
    except SettingNotFoundException:
        return False
def make_local_settings(run_settings):
    """Build a local settings dict from a deep copy of the user's profile
    section plus the system platform key and the BDP username."""
    from copy import deepcopy
    profile_vals = getvals(run_settings, models.UserProfile.PROFILE_SCHEMA_NS)
    local_settings = deepcopy(profile_vals)
    update(local_settings, run_settings,
           RMIT_SCHEMA + '/system/platform')
    local_settings['bdp_username'] = getval(
        run_settings, '%s/bdp_userprofile/username' % RMIT_SCHEMA)
    return local_settings
def input_valid(self, settings_to_test):
    """Check that the HRMC random seed, when present, is in range.

    Returns a (bool, message) tuple.  A missing or unparseable seed is
    ignored (treated as valid), matching the original best-effort
    behaviour of the ``ignored`` context manager.
    """
    # FIXME: move to hrmc
    with ignored(SettingNotFoundException, ValueError):
        iseed = int(
            getval(settings_to_test,
                   '%s/input/hrmc/iseed' % django_settings.SCHEMA_PREFIX))
        # FIXME: should be the number of lines in the random_number file
        NUMBER_SEEDS = 10000
        # Idiom/perf fix: chained comparison instead of `in range(...)`,
        # which is O(1) and avoids materialising a 10000-element list on
        # Python 2.
        if not 0 <= iseed < NUMBER_SEEDS:
            return (False,
                    'Random Number Seed should be in range (0, %d)' % (NUMBER_SEEDS - 1))
    return (True, 'valid input')
def is_triggered(self, run_settings):
    """Trigger once the transform stage has marked itself complete."""
    try:
        # Returns the (truthy/falsy) int flag itself, as the original did.
        return int(getval(
            run_settings,
            '%s/stages/transform/transformed' % RMIT_SCHEMA))
    except (SettingNotFoundException, ValueError):
        return False
def is_triggered(self, run_settings):
    """
    Triggered when we know that we have N nodes set up and ready to run.
    input_dir is assumed to be populated.

    NOTE(review): the success path has no explicit return (yields None,
    falsy) and ``schedule_completed`` is read but unused — a trailing
    ``return`` may have been lost; confirm against the original file.
    """
    try:
        schedule_completed = int(
            getval(
                run_settings,
                '%s/stages/schedule/schedule_completed' % self.SCHEMA_PREFIX))
        self.all_processes = ast.literal_eval(
            getval(run_settings,
                '%s/stages/schedule/all_processes' % self.SCHEMA_PREFIX))
    except SettingNotFoundException, e:
        return False