class SPGRunningPool():

    def __init__(self):
        self.master_db = SPGMasterDB(EnsembleConstructor=ParameterEnsembleThreaded)
        self.lock = threading.Lock()
        self.db_locks = {}

    def get_lock(self, i_db):
        """Return (creating it on first use) the per-database lock for i_db."""
        if i_db.full_name not in self.db_locks:
            self.db_locks[i_db.full_name] = threading.Lock()
        return self.db_locks[i_db.full_name]

    def launch_workers(self):
        target_jobs, = self.master_db.query_master_fetchone(
            'SELECT max_jobs FROM queues WHERE name = "default"')
        self.master_db.update_list_ensemble_dbs()
        if len(self.master_db.active_dbs) == 0:
            utils.inline_msg("MSG", "No active dbs... sleeping ")
            return
        current_count = self.active_threads()
        to_launch = target_jobs - current_count
        if to_launch >= 0:
            utils.newline_msg("STATUS", "[n_jobs=%d] run=%d ::: new=%d"
                              % (target_jobs, current_count, to_launch))
        else:
            utils.newline_msg("STATUS", "[n_jobs=%d] run=%d :!: exceed"
                              % (target_jobs, current_count))
        for i_t in range(to_launch):
            self.lock.acquire()
            pick = self.master_db.pick_ensemble()
            status = pick.get_updated_status()
            if status['process_not_run'] == 0:
                # Nothing left to run in this ensemble: mark it done and stop launching.
                print("+D+ ----- %s " % (pick.full_name))
                self.master_db.query_master_db(
                    'UPDATE dbs SET status= ? WHERE full_name = ?', "D", pick.full_name)
                self.lock.release()  # release before returning, otherwise the lock stays held
                return
            self.lock.release()
            nt = SPGRunningAtom(pick, self.lock)
            # nt = SPGRunningAtom(pick, lock=self.get_lock( pick ) )
            nt.start()

    def active_threads(self):
        # The main thread itself does not count as a worker.
        return threading.active_count() - 1

#rp = SPGRunningPool(50, 2)
#rp.run()
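# The commented-out constructor call above suggests each worker was meant to take the
# per-database lock from get_lock() instead of the single pool-wide lock, so that only
# writes to the same .spgql file contend. A minimal, self-contained sketch of that
# pattern follows; Worker and its body are hypothetical stand-ins for SPGRunningAtom,
# not the actual implementation.

import threading

class Worker(threading.Thread):
    """Hypothetical stand-in for SPGRunningAtom: serialises writes per database."""

    def __init__(self, db_name, db_lock):
        threading.Thread.__init__(self)
        self.db_name = db_name
        self.db_lock = db_lock

    def run(self):
        # Threads working on the same database take turns; different databases run in parallel.
        with self.db_lock:
            print("writing results for", self.db_name)

_locks = {}

def _get_lock(name):
    # Same idea as SPGRunningPool.get_lock, keyed by the database's full name.
    if name not in _locks:
        _locks[name] = threading.Lock()
    return _locks[name]

if __name__ == "__main__":
    threads = [Worker(n, _get_lock(n)) for n in ("a.spgql", "a.spgql", "b.spgql")]
    for t in threads:
        t.start()
    for t in threads:
        t.join()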
class SPGDBCommandLine(BaseSPGCommandLine):
    """A command handler which interfaces a master DB"""

    def __init__(self, EnsembleConstructor=ParameterEnsemble):
        BaseSPGCommandLine.__init__(self, EnsembleConstructor)
        self.prompt = "| spg-db :::~ "
        # self.current_param_db = None
        self.master_db = SPGMasterDB(EnsembleConstructor=EnsembleConstructor)

    def filter_db_list(self, ls=None, filter=None):
        if ls is None:
            ls = list(self.master_db.result_dbs.keys())
        if filter and re.match(r"^\d+?$", filter):  #:::~ Is filter an integer???
            id = int(filter)
            rdb = self.master_db.result_dbs
            filtered = [x for x in ls
                        if x in rdb and rdb[x] is not None and rdb[x]['id'] == id]
            return filtered
        if filter:
            ret = fnmatch.filter(ls, filter)
        else:
            ret = ls
        return sorted(ret)

    # def get_db(self, c):
    #     """it returns the db name (or None) of a database identified either from its id or """
    #     db_name = c.strip()
    #     if db_name.isdigit():
    #         id = int( db_name )
    #         rdb = self.master_db.result_dbs
    #         filtered = [x for x in rdb if rdb[x] is not None and rdb[x]['id'] == id]
    #         if filtered:
    #             db_name = filtered[0]
    #         else:
    #             utils.newline_msg("ERR", "database with id '%s' doesn't exist." % c)
    #             return None
    #     full_name, path, base_name, extension = utils.translate_name(db_name)
    #     db_name = "%s/%s.spgql" % (path, base_name)
    #     sim_name = "%s/%s.spg" % (path, base_name)
    #     if not os.path.exists(db_name) and not os.path.exists(sim_name):
    #         utils.newline_msg("ERR", "database with name '%s' doesn't exist." % c)
    #         return None
    #     return self.EnsembleConstructor(db_name, init_db=True)
    #     # if self.master_db.result_dbs.has_key(db_name):
    #     #     return self.master_db.result_dbs[db_name]
    #     # else:
    #     #     utils.newline_msg("WRN", "database '%s' is not registered, loading it anyhow" % db_name )
    #     # return None

    def get_flags_and_db(self, c, init_db=True):
        flags, args = self.parse_command_line(c)
        # if len(args)== 1:
        #     utils.newline_msg("ERR", "a single file was expected or could not parse flags")
        #     return flags, args, None
        db_name = args[-1]
        args = args[:-1]
        if db_name.isdigit():
            id = int(db_name)
            rdb = self.master_db.result_dbs
            filtered = [x for x in rdb if rdb[x]['id'] == id]
            if filtered:
                db_name = filtered[0]
            else:
                utils.newline_msg("ERR", "database with id '%s' doesn't exist." % db_name)
                args.append(db_name)  # list.append() returns None, so append before returning
                return flags, args, None
        else:
            full_name, path, base_name, extension = utils.translate_name(db_name)
            # print "do_init::: ",self.translate_name(i_arg)
            db_name = "%s/%s.spgql" % (path, base_name)
            # sim_name = "%s/%s.spg" % (path, base_name)
            if not os.path.exists(db_name):
                utils.newline_msg("ERR", "database with name '%s' doesn't exist."
                                  % utils.shorten_name(db_name))
                args.append(db_name)
                return flags, args, None
        return flags, args, self.EnsembleConstructor(db_name, init_db)

    def do_ls(self, c):
        """ls REGEXP|DB_ID
        lists the databases already registered in the master database
        and the possible ones found in the current directory"""
        ls_res_db = self.filter_db_list(filter=c)
        if ls_res_db:
            print(" --- registered dbs")
            for i in sorted(ls_res_db):
                # :::~FIXME workaround for non-existing dbs
                curr_db = self.master_db.result_dbs[i]
                short_name = utils.shorten_name(i)
                try:
                    print("%5d: %s (%5.5f)" % (curr_db['id'], short_name, curr_db['weight']))
                except Exception:
                    print("%5d: %s " % (curr_db['id'], short_name))
        BaseSPGCommandLine.do_ls(self, c)

    # def do_load(self,c):
    #     """load DB_NAME|DB_ID
    #     loads one of the registered databases from the master"""
    #     c = c.split()
    #     if len(c) > 1:
    #         utils.newline_msg("ERR", "only one db can be loaded at a time", 2)
    #         return
    #     ret = self.get_db_from_cmdline(c[0])
    #     if ret:
    #         self.current_param_db = ret
    #         print " --- loaded db '%s'" % utils.shorten_name(self.current_param_db.full_name)
    #     # else:
    #     #     utils.newline_msg("ERR", "db does not exist", 2)

    def do_init(self, c):
        """init [--flag ...] PARAMETERS_NAME|DB_NAME [VAR1=VALUE1[:VAR2=VALUE2]]
        Generates a new database out of a simulation.dat
        FLAGS::: --purge: deletes the spgql database, if it already exists
                 --repeat=REPEAT repeats the parameter generation REPEAT times
        """
        flags, db_arg = self.parse_command_line(c)
        if len(db_arg) != 1:
            utils.newline_msg("WRN", "init must be called with a database", 2)
            return
        db_arg = db_arg[0]
        # i_arg = c[0]
        full_name, path, base_name, extension = utils.translate_name(db_arg)
        # print "do_init::: ",self.translate_name(i_arg)
        full_db_name = "%s/%s.spgql" % (path, base_name)
        sim_name = "%s/%s.spg" % (path, base_name)

        if os.path.exists(full_db_name) and "purge" not in flags:
            utils.newline_msg("ERR", "database '%s' already exists. Cannot init it twice"
                              % utils.shorten_name(full_db_name), 2)
            return
        if not os.path.exists(sim_name):
            utils.newline_msg("ERR", "configuration '%s' doesn't exist. Cannot init it"
                              % utils.shorten_name(sim_name), 2)
            return
        if "purge" in flags:
            try:
                self.do_deregister(db_arg)
                os.remove(full_db_name)
            except Exception:
                utils.newline_msg("WRN", "database '%s' could not be removed... skipping"
                                  % full_db_name)
        if 'repeat' in flags:
            repeat = int(flags['repeat'])
        else:
            repeat = 1

        parser = MultIteratorDBBuilder(db_name=full_db_name)
        parser.init_db()
        parser.fill_status(repeat=repeat)

        current_param_db = ParameterEnsemble(full_db_name, init_db=True)
        current_param_db.repeat = repeat
        # if len(c) > 1:
        #     self.do_set(":".join(c[1:]))
        self.master_db.write_ensemble_to_master(current_param_db)
        self.master_db.update_list_ensemble_dbs()
        print(" **-- init - %d: '%s' " % (current_param_db.id,
                                          utils.shorten_name(current_param_db.full_name)))

    def complete_init(self, text, line, begidx, endidx):
        # "*.spgql": the original pattern ".spgql" could never match a file name
        completions = fnmatch.filter(os.listdir("."), "*.spgql")
        completions.extend(fnmatch.filter(os.listdir("."), "*.spg"))
        if text:
            completions = [f for f in completions if f.startswith(text)]
        return completions

    def do_register(self, c):
        """registers a given results database into the master database"""
        flags, cmds, ensemble = self.get_flags_and_db(c)
        if ensemble is None:
            # utils.newline_msg("ERR", "no database supplied ... skipping")
            return
        if ensemble.full_name in self.master_db.result_dbs:
            utils.newline_msg("WRN", "skipping... database '%s' is already registered"
                              % utils.shorten_name(ensemble.full_name), 2)
            return
        # current_param_db = ParameterEnsemble(db_name, init_db=True)
        self.master_db.write_ensemble_to_master(ensemble)
        self.master_db.update_list_ensemble_dbs()
        print(" *--- registered - %d: '%s' " % (ensemble.id,
                                                utils.shorten_name(ensemble.full_name)))

    def complete_register(self, text, line, begidx, endidx):
        return self.complete_init(text, line, begidx, endidx)

    def do_clean(self, c):
        """clean [-flag ...] PARAMETERS_NAME|DB_NAME [VAR1=VALUE1[:VAR2=VALUE2]]
        if no arguments are given, sets all the rows in run_status with status R or E to N
        FLAGS::: --all: sets all the rows in run_status to N
        """
        #:::~ OK, as of 13.10.11
        flags, cmds, ensemble = self.get_flags_and_db(c)
        # ensemble = self.get_db(db_name)
        if ensemble is None:
            # utils.newline_msg("ERR", "database not found... aborting")
            return
        if "all" in flags:
            ensemble.execute_query('UPDATE run_status SET status = "N" ')
        else:
            ensemble.execute_query('UPDATE run_status SET status = "N" WHERE status ="R" OR status ="E" ')

    def complete_clean(self, text, line, begidx, endidx):
        return self.complete_init(text, line, begidx, endidx)

    def do_deregister(self, c):
        """remove current_db|FILENAME|_ID_
        deregisters a simulation database from the master. It is not removed
        from disk unless --purge is used
        FLAGS::: --purge: deletes the spgql database, if it already exists"""
        flags, cmds, ensemble = self.get_flags_and_db(c, init_db=False)
        if ensemble is None:
            # utils.newline_msg("ERR", "no database supplied nor currently set... skipping")
            return
        # ensemble = self.get_db_from_cmdline(db_name)
        # if not self.current_param_db is None and self.current_param_db.full_name == db_name:
        #     self.current_param_db = None
        if "purge" in flags and os.path.exists(ensemble.full_name):
            os.remove(ensemble.full_name)
        self.master_db.query_master_db("DELETE FROM dbs WHERE full_name = ?", ensemble.full_name)
        if ensemble.full_name in self.master_db.result_dbs:
            del self.master_db.result_dbs[ensemble.full_name]
        # :::~ FIXME
        self.master_db.synchronise_master_db()

    def complete_deregister(self, text, line, begidx, endidx):
        return self.complete_init(text, line, begidx, endidx)

    def do_set_weight(self, c):
        flags, args, ensemble = self.get_flags_and_db(c)
        # print flags, args, ensemble
        if ensemble is None:
            return
        try:
            new_weight = float(args[0])
        except Exception:
            utils.newline_msg("ERR", "cannot parse weight")
            return
        try:
            # parameterised query, consistent with the other calls to query_master_db
            self.master_db.query_master_db("UPDATE dbs SET weight= ? WHERE full_name = ?",
                                           new_weight, ensemble.full_name)
        except Exception:
            utils.newline_msg("ERR", "cannot parse command")
            return
        self.master_db.update_list_ensemble_dbs()

    # def do_set(self, c):
    #     """set VAR1=VALUE1 VAR2=VALUE2
    #     sets some values in the currently loaded database
    #     FLAGS::: --help, the possible keys are printed """
    #     flags, c = self.parse_command_line(c)
    #     if "help" in flags:
    #         print utils.newline_msg("HELP", " possible_keys = %s" % self.possible_keys)
    #         return
    #     if not self.current_param_db:
    #         utils.newline_msg("WRN", "not database loaded... skipping")
    #         return
    #     for iarg in c:
    #         ret = utils.parse_to_dict(iarg, allowed_keys=self.possible_keys)
    #         if not ret:
    #             utils.newline_msg("ERR", "'%s' not understood" % iarg)
    #             return
    #         # if k == "repeat": continue  # repeat is not in the master db (should it be added)
    #         for k in ret:
    #             self.current_param_db.__dict__[k] = ret[k]
    #             self.master_db.query_master_db('UPDATE dbs SET %s= ? WHERE id = ?' % k, ret[k],
    #                                            self.current_param_db.id)

    def __set_status(self, c, st):
        # if not c:
        #     ls_res_db = [ self.current_param_db.full_name ]
        # else:
        #     ls_res_db = self.filter_db_list( filter = c )
        # if not ls_res_db: return
        # for i in ls_res_db:
        flags, cmds, ensemble = self.get_flags_and_db(c)
        # ensemble = self.get_db(db_name)
        if ensemble is None:
            # utils.newline_msg("ERR", "database not found... aborting")
            return
        ensemble.status = st
        print(" +--- '%s' [status : %s ]" % (utils.shorten_name(ensemble.full_name), st))
        self.master_db.query_master_db('UPDATE dbs SET status= ? WHERE full_name = ?',
                                       st, ensemble.full_name)

    # def do_stop(self, c):
    #     """stops the currently loaded registered database"""
    #     self.__set_status(c, 'S')

    def do_start(self, c):
        """starts the currently loaded registered database"""
        self.__set_status(c, 'R')

    def do_pause(self, c):
        """pauses the currently loaded registered database"""
        self.__set_status(c, 'P')

    def do_set_jobs(self, c):
        """sets the maximum number of jobs running concurrently
        usage: N_JOBS"""
        c = c.split()
        if len(c) == 1:
            max_jobs = int(c[0])
            self.master_db.query_master_db('UPDATE queues SET max_jobs= ? WHERE name = "default"',
                                           max_jobs)

    def do_get_jobs(self, c):
        """returns the number of jobs that would concurrently run in a multi-threaded run"""
        nj, = self.master_db.query_master_fetchone('SELECT max_jobs FROM queues WHERE name = "default"')
        print(" +--- no_jobs = %d " % nj)

    def do_info(self, c):
        """info REGEXP
        prints the information of the results databases, filtered by a regular expression, or its id
        """
        flags, cmds, ensemble = self.get_flags_and_db(c)
        # ensemble = self.get_db(db_name)
        if ensemble is None:
            # utils.newline_msg("ERR", "database not found... aborting")
            return
        db_status = ensemble.get_updated_status()
        param_db_id = self.master_db.query_master_fetchone(
            "SELECT id, status, weight FROM dbs WHERE full_name = ?", ensemble.full_name)
        if param_db_id is None:
            # not registered in the master db: keep placeholders so the prints below still work
            param_db_id, status, weight = "X", "?", 0.0
        else:
            [param_db_id, status, weight] = param_db_id
        print(" ---%5s: %s" % (param_db_id, utils.shorten_name(ensemble.full_name)))
        frac_done = float(db_status['process_done']) / float(db_status['value_set'])
        n_repet = db_status['value_set_with_rep'] // db_status['value_set']
        print(" -+ status = %s / weight: %5.5f " % (status, weight))
        print(" -+ total = %d*%d / done: %d (%.5f) - running: %d - error: %d " % (
            db_status['value_set'], n_repet, db_status['process_done'], frac_done,
            db_status['process_running'], db_status['process_error']))
        try:
            print(" -+ time = %f / mean: %f - min: %f - max: %f" % (
                db_status['total_run_time'], db_status['avg_run_time'],
                db_status['min_run_time'], db_status['max_run_time']))
        except Exception:
            pass
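# The prompt attribute and the do_*/complete_* methods follow the standard library
# cmd.Cmd protocol, which BaseSPGCommandLine presumably wraps. A minimal driver for an
# interactive spg-db session could then look like the sketch below; whether the project
# ships its own entry point instead is not shown here, so treat this as an assumption.

if __name__ == "__main__":
    shell = SPGDBCommandLine()  # uses ParameterEnsemble by default
    shell.cmdloop("spg-db: type 'help' to list the available commands")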
class QueueCommandParser(cmd.Cmd):
    """command processor for the queues."""

    def __init__(self):
        cmd.Cmd.__init__(self)
        self.prompt = "| spg-queue :::~ "
        self.current_queue = None
        self.master_db = SPGMasterDB()
        # self.possible_keys = ['max_jobs', 'status']
        self.__update_queue_list()

    def __update_queue_list(self):
        self.queues = self.master_db.query_master_db("SELECT * FROM queues ORDER BY id")

    def do_load(self, c):
        """loads a queue as the active one"""
        self.__update_queue_list()
        if any(x[1] == c for x in self.queues):
            self.current_queue = c
            print("loaded '%s'" % self.current_queue)
        else:
            newline_msg("ERR", "queue '%s' not found" % c)

    def completedefault(self, text, line, begidx, endidx):
        self.__update_queue_list()
        completions = [name for (id, name, max_jobs, status) in self.queues]
        if text:
            completions = [f for f in completions if f.startswith(text)]
        return completions

    def do_ls(self, c):
        """lists the queues in the master database"""
        self.__update_queue_list()
        for (id, name, max_jobs, status) in self.queues:
            print(" %16s - J: %2d - S: '%s'" % (name, max_jobs, status))

    ### CREATE TABLE IF NOT EXISTS queues
    ###     ( id INTEGER PRIMARY KEY, name CHAR(64), max_jobs INTEGER,
    ###       status CHAR(1) )

    def do_add(self, c):
        """adds a queue"""
        self.__update_queue_list()
        if any(x[1] == c for x in self.queues):
            newline_msg("ERR", "queue '%s' already exists" % c, 2)
            return
        self.master_db.query_master_db("INSERT INTO queues (name, max_jobs, status) VALUES (?,?,?)",
                                       c, 1, "S")
        os.makedirs("%s/queue/%s" % (VAR_PATH, c))
        self.current_queue = c

    def do_set_max_jobs(self, c):
        """sets the maximum number of jobs in the given queue
        usage: [regexp] N_JOBS"""
        c = c.split()
        if len(c) == 1 and self.current_queue:
            max_jobs = int(c[0])
            self.master_db.query_master_db("UPDATE queues SET max_jobs= ? WHERE name = ?",
                                           max_jobs, self.current_queue)
        elif len(c) == 2:
            pattern = c[0]  # renamed from 're' so the regexp module is not shadowed
            max_jobs = int(c[1])
            lsq = [n for (id, n, mj, s) in self.queues]
            lsq = fnmatch.filter(lsq, pattern)
            for q in lsq:
                self.master_db.query_master_db("UPDATE queues SET max_jobs= ? WHERE name = ?",
                                               max_jobs, q)

    def do_clean_pool(self, c):
        """cleans all the hanging pickled input/output files"""
        proc = Popen("rm -f %s/run/*" % VAR_PATH, shell=True,
                     stdin=PIPE, stdout=PIPE, stderr=PIPE)
        proc.wait()
        self.__update_queue_list()
        for (id, name, max_jobs, status) in self.queues:
            proc = Popen("rm -f %s/queue/%s/*" % (VAR_PATH, name), shell=True,
                         stdin=PIPE, stdout=PIPE, stderr=PIPE)
            proc.wait()

    def do_remove(self, c):
        """removes the current queue, or the queues matching a pattern, from the master database
        usage: [regexp]"""
        c = c.split()
        if len(c) == 0 and self.current_queue:
            self.master_db.query_master_db("DELETE FROM queues WHERE name = ?", self.current_queue)
        elif len(c) == 1:
            pattern = c[0]
            lsq = [n for (id, n, mj, s) in self.queues]
            lsq = fnmatch.filter(lsq, pattern)
            for q in lsq:
                self.master_db.query_master_db("DELETE FROM queues WHERE name = ?", q)
        # ret = utils.parse_to_dict(c, allowed_keys=self.possible_keys)
        # for k in ret:
        #     self.values[k] = ret[k]
        #     print "%s = %s" % (k, ret[k])
        # if self.current_queue:
        #     if ret.has_key('max_jobs'):
        #     if ret.has_key('status'):
        #         self.master_db.execute_query('UPDATE queues SET status= ? WHERE name = ?', self.current_queue)
        # self.master_db.execute('UPDATE dbs SET status="%s" WHERE id = %s' % (self.current_param_db.status, self.current_param_db.id))
        # self.doc_header = "default values: %s" % (self.values)

    def __set_status(self, status, name=None):
        if not name and self.current_queue:
            self.master_db.query_master_db("UPDATE queues SET status= ? WHERE name = ?",
                                           status, self.current_queue)
            return
        lsq = [n for (id, n, max_jobs, s) in self.queues]
        if name:
            lsq = fnmatch.filter(lsq, name)
        for q in lsq:
            self.master_db.query_master_db("UPDATE queues SET status= ? WHERE name = ?", status, q)

    def do_stop(self, c):
        """stops the current queue, or the queues matching a pattern"""
        self.__set_status("S", c)

    def do_start(self, c):
        """starts the current queue, or the queues matching a pattern"""
        self.__set_status("R", c)

    def do_pause(self, c):
        """pauses the current queue, or the queues matching a pattern"""
        self.__set_status("P", c)

    def do_EOF(self, line):
        return True
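# The queues table layout is documented in the CREATE TABLE comment inside the class.
# As a self-contained illustration of the same bookkeeping, the sketch below replays the
# queries issued by do_add, do_set_max_jobs, do_start and do_ls against a throwaway
# in-memory sqlite database. Note the plain sqlite3 API takes the parameters as a tuple,
# whereas query_master_db above receives them as separate positional arguments.

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE IF NOT EXISTS queues "
             "( id INTEGER PRIMARY KEY, name CHAR(64), max_jobs INTEGER, status CHAR(1) )")

# do_add: a new queue starts with a single job slot and status 'S' (stopped)
conn.execute("INSERT INTO queues (name, max_jobs, status) VALUES (?,?,?)", ("default", 1, "S"))
# do_set_max_jobs and do_start on the loaded queue
conn.execute("UPDATE queues SET max_jobs= ? WHERE name = ?", (8, "default"))
conn.execute("UPDATE queues SET status= ? WHERE name = ?", ("R", "default"))

# do_ls
for (id, name, max_jobs, status) in conn.execute("SELECT * FROM queues ORDER BY id"):
    print(" %16s - J: %2d - S: '%s'" % (name, max_jobs, status))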
class SPGRunningPool():

    def __init__(self, test_run=False):
        self.master_db = SPGMasterDB(EnsembleConstructor=ParameterEnsembleThreaded)
        self.test_run = test_run
        self.lock = threading.Lock()
        self.db_locks = {}
        ### :::~ The number of processes that are active in each spg file
        self.active_processes = defaultdict(lambda: 0)

    def get_lock(self, i_db):
        """Return (creating it on first use) the per-database lock for i_db."""
        if i_db.full_name not in self.db_locks:
            self.db_locks[i_db.full_name] = threading.Lock()
        return self.db_locks[i_db.full_name]

    def launch_workers(self):
        target_jobs, = self.master_db.query_master_fetchone(
            'SELECT max_jobs FROM queues WHERE name = "default"')
        self.master_db.update_list_ensemble_dbs()
        if len(self.master_db.active_dbs) == 0:
            utils.inline_msg("MSG", "No active dbs... sleeping ")
            return
        current_count = self.active_threads()

        # Share target_jobs among the active databases proportionally to their weight,
        # then subtract what is already running in each one.
        vec_to_launch = []
        launch = defaultdict(lambda: 0)
        running = {}
        for ae in self.master_db.active_dbs:
            ens = self.master_db.result_dbs[ae]
            running[ens['id']] = self.active_processes[ae]
            qty_to_launch = int(
                m.floor(0.5 + target_jobs * ens['weight'] / self.master_db.normalising)
                - self.active_processes[ae])
            vec_to_launch += qty_to_launch * [ae]
            launch[ens['id']] += qty_to_launch
        # for id in launch:
        #     print "+++ (%d) %d + %d = //%d//, " % (id, launch[id], running[id], launch[id] + running[id])

        to_launch = len(vec_to_launch)
        if to_launch >= 0:
            utils.newline_msg("STATUS", utils.str_color(
                "@green[n_jobs=%d] run=%d %s ::: new=%d"
                % (target_jobs, current_count, dict(running), to_launch)))
        else:
            utils.newline_msg("STATUS", utils.str_color(
                "@yellow[n_jobs=%d] run=%d :!: exceeded number"
                % (target_jobs, current_count)))

        for ae in vec_to_launch:
            pick = self.master_db.EnsembleConstructor(ae, init_db=True)
            self.lock.acquire()
            pick.test_run = self.test_run
            status = pick.get_updated_status()
            if status['process_not_run'] == 0:
                # Nothing left to run in this ensemble: mark it done and stop launching.
                print("+D+ ----- %s " % (pick.full_name))
                self.master_db.query_master_db(
                    'UPDATE dbs SET status= ? WHERE full_name = ?', "D", pick.full_name)
                self.lock.release()  # release before returning, otherwise the lock stays held
                return
            self.lock.release()
            nt = SPGRunningAtom(pick, self.lock, self.active_processes)
            # nt = SPGRunningAtom(pick, lock=self.get_lock( pick ) )
            nt.start()

    def active_threads(self):
        # The main thread itself does not count as a worker.
        return threading.active_count() - 1

#rp = SPGRunningPool(50, 2)
#rp.run()
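# The commented-out lines above only hint at how the pool is driven. A minimal polling
# loop, assuming the surrounding module's imports (SPGMasterDB, SPGRunningAtom, utils)
# are in place, could look like this; run_pool and the 30-second interval are
# illustrative choices, not part of the original code.

import time

def run_pool(poll_seconds=30, test_run=False):
    # Repeatedly top up the worker threads until interrupted with Ctrl-C.
    pool = SPGRunningPool(test_run=test_run)
    try:
        while True:
            pool.launch_workers()
            time.sleep(poll_seconds)
    except KeyboardInterrupt:
        print("stopping... %d worker(s) still running" % pool.active_threads())

# run_pool(poll_seconds=30)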