def execute(self, name):
    """Run self.commands as a linear FireWorks workflow named *name*.

    Progress is recorded through self.state: READY -> RUNNING -> FINISHED,
    or ERROR if anything raises. NOTE: lp.reset() wipes the LaunchPad DB.
    """
    try:
        self.state.saveState('READY')
        lp = LaunchPad(**self.db)
        lp.reset('', require_password=False)
        fireworks = []
        for idx, command in enumerate(self.commands):
            # Chain each task after the previous one; the first has no parent
            # (None matches Firework's default).
            parent = [fireworks[idx - 1]] if idx > 0 else None
            fireworks.append(
                Firework(ScriptTask.from_str(command),
                         name=f'task_{idx}',
                         fw_id=idx,
                         parents=parent))
        self.state.saveState('RUNNING')
        lp.add_wf(Workflow(fireworks, name=name))
        rapidfire(lp)
        self.state.saveState('FINISHED')
    except Exception as e:
        # Best-effort boundary: report the error and record ERROR state.
        print(e)
        self.state.saveState('ERROR')
def setUpClass(cls):
    """Connect to the local MongoDB test database, or skip the whole class.

    Raises:
        unittest.SkipTest: if the LaunchPad cannot be created or reset,
            i.e. MongoDB is not reachable on localhost:27017.
    """
    cls.lp = None
    try:
        cls.lp = LaunchPad(name=TESTDB_NAME, strm_lvl='ERROR')
        cls.lp.reset(password=None, require_password=False)
    except Exception:
        # A bare `except:` would also swallow KeyboardInterrupt/SystemExit;
        # catching Exception matches the sibling setUpClass in this codebase.
        raise unittest.SkipTest('MongoDB is not running in localhost:27017! Skipping tests.')
def main(sequencing_directory, library_prefix, num_libraries, raw_data_dir):
    """Queue one kallisto-count Firework per library directory.

    Args:
        sequencing_directory (str): parent directory holding the libraries.
        library_prefix (str): folder-name prefix; libraries are
            <prefix>1 .. <prefix>N.
        num_libraries (int): number of library directories to process.
        raw_data_dir (str): unused here; kept for interface compatibility.
    """
    # Context manager closes the yaml handle (the original leaked it via
    # yaml.load(open(...))).
    with open("my_launchpad.yaml") as f:
        lpad = LaunchPad(**yaml.load(f))
    workflow_fireworks = []
    workflow_dependencies = collections.defaultdict(list)
    # range works on both Python 2 and 3 (xrange is Python-2-only).
    library_dirs = [
        os.path.join(sequencing_directory, library_prefix + str(i + 1))
        for i in range(num_libraries)
    ]
    subdirs = [
        'unzipped', 'trimmed', 'aligned_kallisto', 'bammed', 'sorted',
        'counted', 'pythonized'
    ]
    for library_dir in library_dirs:
        seq_functions.make_directories(library_dir, subdirs)
        name = "Count_%s" % os.path.basename(library_dir)
        fw_count = Firework(
            [
                CountTask(library_path=library_dir,
                          aligned_name="aligned_kallisto",
                          bammed_name="bammed",
                          counted_name="counted",
                          spikeids=['Spike1', 'Spike4', 'Spike7'])
            ],
            name=name,
            spec={"_queueadapter": {"job_name": name}},
        )
        workflow_fireworks.append(fw_count)
    # workflow_dependencies stays empty: the count fireworks are independent.
    lpad.add_wf(Workflow(workflow_fireworks, links_dict=workflow_dependencies))
def setUpClass(cls):
    """Create a FWorker and a LaunchPad on the test DB; skip if MongoDB is down."""
    cls.fworker = FWorker()
    try:
        cls.lp = LaunchPad(name=TESTDB_NAME, strm_lvl="ERROR")
        cls.lp.reset(password=None, require_password=False)
    except Exception:
        # No reachable MongoDB on localhost:27017 -> skip the whole class.
        raise unittest.SkipTest("MongoDB is not running in localhost:27017! Skipping tests.")
def __init__(self):
    """Initialise the driver with a SimManager and a default LaunchPad.

    Attributes set here:
        simManager: SimManager instance (presumably drives simulations --
            confirm against SimManager's definition).
        launchpad: FireWorks LaunchPad with default settings (localhost).
        ids: list, starts empty -- appears to collect firework ids.
        fws: list, starts empty -- appears to collect Firework objects.
        last: int counter/index, starts at 0.
        rerun: bool flag, starts False.
    """
    self.simManager = SimManager()
    self.launchpad = LaunchPad()  # default connection parameters
    self.ids = []
    self.fws = []
    self.last = 0
    self.rerun = False
def create_launchpad(username, password, server="serenity", lpadname=None):
    """
    Creates the fireworks launchpad on specific preset servers.

    Args:
        username (str) : username for the mongodb database
        password (str) : password for the mongodb database
        server (str) : server name: "serenity" (default) or "atlas"
        lpadname (str) : name of the fireworks internal database. If not
            given, the name is inferred from the first two letters of the
            username (only honoured for the "serenity" server).

    Returns:
        fireworks object : Launchpad for internal fireworks use.
    """
    if server == "atlas":
        # DB name is always inferred on atlas; lpadname is ignored here.
        name = username[:2] + "fireworks"
        lp = LaunchPad(host="austerity-shard-00-01-hgeov.mongodb.net:27017",
                       port=27017,
                       name=name,
                       username=username,
                       password=password,
                       logdir=".",
                       strm_lvl="INFO",
                       ssl=True,
                       authsource="admin")
    elif server == "serenity":
        if lpadname:
            name = lpadname
        else:
            name = username[:2] + "fireworks"
        lp = LaunchPad(
            host="nanolayers.dyndns.org:27017",
            port=27017,
            name=name,
            username=username,
            password=password,
            logdir=".",
            strm_lvl="INFO",
            #ssl = True,
            # NOTE: unlike atlas, authentication happens against the
            # target database itself, not "admin".
            authsource=name)
    else:
        # Unrecognised server name: fall back to a default (localhost)
        # LaunchPad; username/password are not used in this branch.
        lp = LaunchPad()
    return lp
def run_workflows():
    """Reset the test database and run the example workflow 23 times."""
    db_name = 'rsled'
    lpad = LaunchPad(name=db_name)
    # clean up tw database if necessary
    lpad.reset(password=None, require_password=False)
    lpad.add_wf(wf_creator([1, 1, 2, "red"]))
    rapidfire(lpad, nlaunches=23, sleep_time=0)
def main(sequencing_directory, library_prefix, num_libraries, raw_data_dir):
    """Queue STAR-alignment and RSEM-count Fireworks for each library.

    Per library an alignment Firework and a counting Firework are created,
    with the count depending on the alignment (workflow_dependencies).

    Args:
        sequencing_directory (str): parent directory holding the libraries.
        library_prefix (str): folder-name prefix; libraries are
            <prefix>1 .. <prefix>N.
        num_libraries (int): number of library directories to process.
        raw_data_dir (str): unused here; kept for interface compatibility.
    """
    # Context manager closes the yaml handle (the original leaked it).
    with open("my_launchpad.yaml") as f:
        lpad = LaunchPad(**yaml.load(f))
    workflow_fireworks = []
    workflow_dependencies = collections.defaultdict(list)
    # range works on both Python 2 and 3 (xrange is Python-2-only).
    library_dirs = [
        os.path.join(sequencing_directory, library_prefix + str(i + 1))
        for i in range(num_libraries)
    ]
    subdirs = ["aligned_star", "quant_rsem", "counted_rsem"]
    # The original called make_directories twice per library (an identical
    # stand-alone loop preceded this one); once is sufficient -- assumed
    # idempotent directory creation, per the helper's name.
    for library_dir in library_dirs:
        seq_functions.make_directories(library_dir, subdirs)
        name = "AlignSTAR_%s" % os.path.basename(library_dir)
        fw_align = Firework(
            [
                Align_star_Task(library_path=library_dir,
                                trimmed_name="trimmed",
                                aligned_name="aligned_star/",
                                quant_name="quant_rsem/")
            ],
            name=name,
            spec={
                "_queueadapter": {
                    "job_name": name,
                    "ntasks_per_node": 8,
                    "walltime": '24:00:00'
                }
            },
        )
        workflow_fireworks.append(fw_align)
        name = "Count_%s" % os.path.basename(library_dir)
        fw_count = Firework(
            [
                Count_rsem_Task(library_path=library_dir,
                                aligned_name="aligned_star",
                                quant_name="quant_rsem",
                                counted_name="counted_rsem",
                                spikeids=[
                                    'AM1780SpikeIn1', 'AM1780SpikeIn4',
                                    'AM1780SpikeIn7'
                                ])
            ],
            name=name,
            spec={"_queueadapter": {"job_name": name}},
        )
        workflow_fireworks.append(fw_count)
        # Counting must run after its library's alignment.
        workflow_dependencies[fw_align].append(fw_count)
    lpad.add_wf(Workflow(workflow_fireworks, links_dict=workflow_dependencies))
def lp(request):
    """Pytest fixture: a LaunchPad on the test database, reset to a clean state."""
    lp = LaunchPad(name=TESTDB_NAME, strm_lvl='ERROR')
    lp.reset(password=None, require_password=False)

    def fin():
        # Teardown helper: drop the whole test database.
        lp.connection.drop_database(TESTDB_NAME)

    # request.addfinalizer(fin)
    # NOTE(review): the finalizer is defined but never registered, so the
    # test database is left behind after the run -- confirm this is intended
    # (e.g. for post-mortem inspection) before re-enabling the line above.
    return lp
def main(algorithms, supports, datasets, working_directory, reset, launchpad_args):
    """Build and submit one workflow per (dataset, algorithm) pair.

    Each workflow is a setup Firework (prepares the working directory)
    followed by one SPMFTask Firework per support value, all children of
    the setup Firework.

    Args:
        algorithms: iterable of algorithm names.
        supports: iterable of integer support thresholds.
        datasets: iterable of dataset file paths.
        working_directory (str): root for per-algorithm work dirs; '~' and
            environment variables are expanded.
        reset: if truthy, reset the LaunchPad database (destructive).
        launchpad_args (dict): kwargs forwarded to LaunchPad(**...).
    """
    launchpad = LaunchPad(**launchpad_args)
    if reset:
        launchpad.reset("", require_password=False)
    working_directory = os.path.expandvars(
        os.path.expanduser(working_directory))
    for dataset in datasets:
        for algorithm in algorithms:
            tasks = []
            links = {}
            spec = {}
            spec["directory"] = os.path.join(working_directory, algorithm)
            spec["local_files"] = [os.path.abspath(dataset)]
            spec["_priority"] = 1000
            # Abbreviate the home directory to '~' for a shorter firework
            # name. str.replace never raises ValueError, so the original
            # try/except ValueError around this line was dead code.
            short_working_directory = spec["directory"].replace(
                os.path.expanduser('~'), '~', 1)
            setup_firework_name = "setup {0}".format(short_working_directory)
            setup_firework = Firework(SetupWorkingDirectory(),
                                      spec=spec,
                                      name=setup_firework_name)
            tasks.append(setup_firework)
            for support in supports:
                spec = {}
                spec["database_filename"] = os.path.basename(dataset)
                spec["algorithm"] = algorithm
                spec["support"] = support
                spec["timings_filename"] = "{0}.{1}.{2}.timing".format(
                    os.path.basename(dataset), algorithm,
                    str(support).zfill(3))
                spec["patterns_filename"] = "{0}.{1}.{2}.patterns.gz".format(
                    os.path.basename(dataset), algorithm,
                    str(support).zfill(3))
                # Higher support values run first (priority == support).
                spec["_priority"] = support
                support_firework_name = "{0} {1}".format(algorithm, support)
                support_firework = Firework(SPMFTask(),
                                            spec=spec,
                                            name=support_firework_name)
                tasks.append(support_firework)
                # Every support firework depends on the single setup firework.
                links.setdefault(setup_firework, []).append(support_firework)
            workflow_name = "{0} {1}".format(os.path.basename(dataset),
                                             algorithm)
            workflow = Workflow(tasks, links, name=workflow_name)
            launchpad.add_wf(workflow)
def lpad(database, debug_mode):
    """Generator fixture yielding a freshly reset LaunchPad for *database*.

    After the consuming test finishes, unless debug_mode is truthy, the
    database is reset again and every collection is dropped so no state
    leaks between runs. With debug_mode set, data is kept for inspection.
    """
    from fireworks import LaunchPad
    lpad = LaunchPad(name=database)
    lpad.reset("", require_password=False)
    yield lpad
    # Teardown (runs after the test body that consumed the yield).
    if not debug_mode:
        lpad.reset("", require_password=False)
        for coll in lpad.db.list_collection_names():
            lpad.db[coll].drop()
def __init__(self, wfname, fxyz, fprot, reset=False):
    """Store workflow file names and connect to the default LaunchPad.

    Args:
        wfname (str): workflow name; also the stem for derived file names.
        fxyz: xyz structure file (path) -- assumed, confirm against callers.
        fprot: companion input file (path) -- meaning not visible here.
        reset (bool): if True, wipe the LaunchPad database (destructive).
    """
    self.launchpad = LaunchPad()
    if reset:
        self.launchpad.reset('', require_password=False)
    self.wfname = wfname
    self.fxyz = fxyz
    self.fprot = fprot
    # Derived artifact names share the workflow name as a stem.
    self.ftxyz = wfname + '.txyz'
    self.fkey = wfname + '.key'
    self.fref = wfname + '.hdf5'
    # (Removed the original's redundant bare `return`; __init__ returns
    # None implicitly.)
def run_workflows():
    """Reset the test DB and run the example optimization to exhaustion."""
    db_name = 'rsled'
    # clean up tw database if necessary
    lpad = LaunchPad(name=db_name)
    lpad.reset(password=None, require_password=False)
    lpad.add_wf(wf_creator([1, 1, 2, "red"], lpad, 3, my_kwarg=1))
    # if n_launches > 24 for this particular example, the search space will be
    # exhausted and OptTask will throw an exception
    rapidfire(lpad, nlaunches=24, sleep_time=0)
def populate_launchpad(software, systems, optimizer=None):
    """A simple function to fill a workflow with a set of systems.

    Args:
        software (str): key into pkg_info selecting the calculator backend.
        systems: iterable of system-class names; for each one, structure and
            parameter json files are loaded from the staging directories.
        optimizer: optional optimizer passed through to get_ase_wflows.
    """
    # load in fireworks -- context manager closes the handle (the original
    # leaked it via yaml.load(open(...))).
    with open('../config/my_launchpad.yaml', 'r') as f:
        launch_pad = yaml.load(f)
    # this is messy, but it has to be done: strip keys LaunchPad(**...)
    # does not accept.
    del launch_pad['ssl_ca_file']
    del launch_pad['strm_lvl']
    del launch_pad['user_indices']
    del launch_pad['wf_user_indices']
    lpad = LaunchPad(**launch_pad)
    # set up Abinit's input settings
    db_file = os.getcwd() + '/../config/db.json'
    for system_class in systems:
        # load in the json files. The original rebound the name `systems`
        # here, shadowing the parameter being iterated; use a distinct name.
        with open('{}{}.json'.format(base_dir + '/staging/structures/',
                                     system_class), 'rb') as f:
            class_structures = json.load(f)
        with open('{}{}.json'.format(base_dir + '/staging/parameters/',
                                     system_class), 'rb') as f:
            parameters = json.load(f)
        # reformat into lists
        ids = []
        systems_list = []
        parameters_list = []
        for id_, system in class_structures.items():
            systems_list.append(system)
            parameters_list.append(parameters[id_])
            ids.append(id_)
        # NOTE(review): parameters_list and ids are built but never used;
        # the raw `parameters` dict is passed below -- confirm intent.
        # convert from pymatgen structures to ase atoms objects
        systems_list = [
            AseAtomsAdaptor.get_atoms(Structure.from_dict(a))
            for a in systems_list
        ]
        wf = get_ase_wflows(
            systems_list,
            parameters=parameters,
            calculator=pkg_info[software]['calculator'],
            to_db=True,
            db_file=db_file,
            optimizer=optimizer,
            calculator_module=pkg_info[software]['calculator_module'],
            identifiers=None)
        # add the workflow
        lpad.add_wf(wf)
def run_workflows():
    """Reset the test DB, enqueue 10 random-guess workflows, run 10 launches."""
    db_name = 'rsled'
    lpad = LaunchPad(name=db_name)
    # clean up tw database if necessary
    lpad.reset(password=None, require_password=False)
    # load 10 batch workflows onto the launchpad
    for _ in range(10):
        lpad.add_wf(wf_creator(random_guess(X_dim)))
    rapidfire(lpad, nlaunches=10, sleep_time=0)
def get_launchpad():
    '''
    This function returns an instance of a `fireworks.LaunchPad` object that is
    connected to our FireWorks launchpad.

    Returns:
        lpad    An instance of a `fireworks.LaunchPad` object
    '''
    configs = read_rc('fireworks_info.lpad')
    # The rc file yields strings; LaunchPad needs the port as an integer.
    configs['port'] = int(configs['port'])
    return LaunchPad(**configs)
def setUp(self):
    """Create a clean test LaunchPad plus two PropertyWorkflowBuilders:
    one over all materials, one restricted to task_id < 3."""
    lpad = LaunchPad(name="test_emmet")
    lpad.reset('', require_password=False)  # wipe the test database
    self.lpad = lpad
    # Builder with no material filter (processes everything).
    self.nofilter = PropertyWorkflowBuilder(
        self.elasticity,
        self.materials,
        wf_elastic_constant,
        material_filter=None,
        lpad=self.lpad)
    self.nofilter.connect()
    # Builder restricted by a Mongo-style filter to task_id < 3.
    self.filter = PropertyWorkflowBuilder(
        self.elasticity,
        self.materials,
        wf_elastic_constant,
        material_filter={"task_id": {"$lt": 3}},
        lpad=self.lpad)
    self.filter.connect()
def main(reset, launchpad_args, rocket_args):
    """Ensure the launch dir exists, optionally reset the LaunchPad, rapidfire."""
    launch_dir = rocket_args["m_dir"]
    if launch_dir:
        try:
            os.makedirs(launch_dir)
        except OSError:
            # Best effort: directory already exists (or is not creatable).
            pass
    launchpad = LaunchPad(**launchpad_args)
    if reset:
        launchpad.reset("", require_password=False)
    rapidfire(launchpad=launchpad, **rocket_args)
def create_launchpad(db_config_file):
    """use to create a FW launchpad using mongodb creds from file

    Args:
        db_config_file (str): ini-style file with a [db] section containing
            host, port, name, username and password keys.

    Returns:
        LaunchPad connected with the credentials from the file.
    """
    # SafeConfigParser is deprecated (and removed in Python 3.12);
    # ConfigParser is its drop-in replacement.
    from configparser import ConfigParser
    config = ConfigParser()
    config.read(db_config_file)
    db = config['db']
    lpad = LaunchPad(host=db['host'],
                     port=int(db['port']),
                     name=db['name'],
                     username=db['username'],
                     password=db['password'])
    return lpad
def launchpad(launchpad_file=None, lpad_name="base"):
    """
    Script to set up the configuration of the launchpad for accessing the
    workflow server.

    Args:
        launchpad_file (str): my_launchpad.yaml file from which to load the
            mongoDB database details.
        lpad_name (str): Name to give to the database in the configuration
            setup.

    Returns:
        None
    """
    if launchpad_file:
        lpad = LaunchPad.from_file(launchpad_file)
    else:
        host = input("Please provide the server host: ")
        port = int(input("Please provide the port number: "))
        name = input("Please provide the server name: ")
        # NOTE(review): the next lines are corrupted -- credential strings
        # were scrubbed to "******" and the LaunchPad(...) construction that
        # should consume host/port/name/username/password (presumably with
        # authsource="admin") is missing. This branch cannot run as-is;
        # restore from version control before use.
        username = input("Please provide your username: "******"Please provide your password: "******"admin")
    # Test the launchpad
    print("\nAttempting connection to mongoDB database...")
    _ = lpad.get_fw_ids()
    print("Connection successful!\n")
    config_lpad_file = os.path.join(os.path.expanduser("~"),
                                    ".workflow_config", "launchpad",
                                    lpad_name + "_launchpad.yaml")
    # Create ~/.workflow_config/launchpad if it does not exist yet.
    try:
        os.makedirs(
            os.path.join(os.path.expanduser("~"), ".workflow_config",
                         "launchpad"))
    except FileExistsError:
        pass
    lpad.to_file(config_lpad_file)
    print("Launchpad file written to " + config_lpad_file + "\n")
def initiate_cluster(inputs):
    """Create one clustercelltk Firework per image-folder contents and submit
    them all as a single unlinked Workflow to the cluster queue.

    Args:
        inputs: forwarded to multi_call, which yields one contents object per
            image folder (exact shape defined by multi_call -- not visible here).
    """
    # check how many image folders are there
    contents_list = multi_call(inputs)
    # Context manager closes the yaml handle (the original leaked it).
    with open(join(celltkroot, "fireworks", "my_launchpad.yaml")) as f:
        lpad = LaunchPad(**yaml.load(f))
    wf_fws = []
    for contents in contents_list:
        fw_name = "cluster_celltk"
        fw = Firework(
            clustercelltk(contents=contents),
            name=fw_name,
            spec={"_queueadapter": {"job_name": fw_name, "walltime": "47:00:00"}},
        )
        wf_fws.append(fw)
    # end loop over input values
    # links_dict={} : the fireworks are independent of each other.
    workflow = Workflow(wf_fws, links_dict={})
    lpad.add_wf(workflow)
def submit(self):
    """Submit this job's workflow to the remote LaunchPad.

    Returns 1 if a new workflow was added, 0 if the job was a repeat.
    NOTE(review): Python 2 code (print statement); credentials were
    scrubbed to '******' -- restore before use.
    """
    from fireworks import LaunchPad
    if self.new():
        launchpad = LaunchPad(host='suncatls2.slac.stanford.edu',
                              name='krisbrown',
                              username='******',
                              password='******')
        wflow = self.wflow()
        launchpad.add_wf(wflow)
        time.sleep(
            2
        )  # folder names are unique due to timestamp to the nearest second
        return 1
    else:
        print 'repeat!'
        return 0
def load_csets(func, optimizer_names):
    """
    Load sets of collections for visualizing rocketsled optimization benchmarks

    Args:
        optimizer_names: iterable of collection-name prefixes, one per optimizer
        func: benchmark function name, used as the Mongo database name

    Returns:
        list of lists: for each optimizer, the 100 per-run collection handles
        named <prefix>0 .. <prefix>99
    """
    lpad = LaunchPad(host='localhost', port=27017, name=func)
    return [
        [getattr(lpad.db, "{}{}".format(prefix, run)) for run in range(100)]
        for prefix in optimizer_names
    ]
def submit(self, listOfIncompleteJobStrs=[]):
    """ use manageIncompleteJobs.listOfIncompleteJobStrs()

    Runs a general sanity check, then submits this job's workflow unless it
    is a repeat. Returns 1 on submission, 0 for a repeat job.
    NOTE(review): Python 2 code (print statement); credentials were scrubbed
    to '******'. The mutable default argument is shared across calls -- it
    is only read here, but confirm before relying on that.
    """
    from fireworks import LaunchPad
    self.generalCheck()
    if self.new(listOfIncompleteJobStrs):
        launchpad = LaunchPad(host='suncatls2.slac.stanford.edu',
                              name='krisbrown',
                              username='******',
                              password='******')
        wflow = self.wflow()
        launchpad.add_wf(wflow)
        time.sleep(
            2
        )  # folder names are unique due to timestamp to the nearest second
        return 1
    else:
        print 'Repeat job!'
        return 0
def initiate_cluster(ia_path, args):
    """Create one clustercovertrack Firework per image directory under
    *ia_path* and submit them as a single unlinked Workflow.

    Args:
        ia_path: parent path whose image directories are enumerated.
        args: parsed CLI args; args.skip drops folders whose results
            already exist (ignore_if_df_existed).
    """
    # check how many image folders are there
    imgdirs = read_imgdirs_from_parentdir(ia_path)
    if args.skip:
        imgdirs = ignore_if_df_existed(imgdirs, ia_path)
    # Context manager closes the yaml handle (the original leaked it).
    with open("my_launchpad.yaml") as f:
        lpad = LaunchPad(**yaml.load(f))
    wf_fws = []
    for iv in imgdirs:
        # start loop over input val
        fw_name = "clustercovertrack"
        fw = Firework(
            clustercovertrack(input_args_path=ia_path, imgdir=iv, args=args),
            name=fw_name,
            spec={"_queueadapter": {"job_name": fw_name, "walltime": "47:00:00"}},
        )
        wf_fws.append(fw)
    # end loop over input values
    # links_dict={} : the fireworks are independent of each other.
    workflow = Workflow(wf_fws, links_dict={})
    lpad.add_wf(workflow)
def submit(self):
    """Build the workflow matching self.jobkind, mark the job 'queued' in the
    DB, and add the workflow to the remote LaunchPad.

    NOTE(review): Python 2 code (print statement); credentials were scrubbed
    to '******' -- restore before use.
    """
    from fireworks import LaunchPad
    launchpad = LaunchPad(host='suncatls2.slac.stanford.edu',
                          name='krisbrown',
                          username='******',
                          password='******')
    # Dispatch on job kind to the matching workflow builder.
    if self.jobkind == 'bulkrelax':
        wflow = self.submitBulkRelax()
    elif self.jobkind == 'relax':
        wflow = self.submitRelax()
    elif self.jobkind == 'vib':
        wflow = self.submitVib()
    elif self.jobkind == 'neb':
        wflow = self.submitNEB()
    elif self.jobkind == 'dos':
        wflow = self.submitDOS()
    # NOTE(review): an unrecognized jobkind leaves `wflow` unbound, so
    # add_wf below would raise NameError -- presumably jobkind is validated
    # upstream; confirm.
    print "Submitting job with ID = %d" % self.jobid
    updateDB('status', 'jobid', self.jobid, 'queued', None, 'job')
    launchpad.add_wf(wflow)
    # Dead commented-out code kept as in the original (string literal):
    """if query1('status','jobid',self.jobid,'job')=='initialized':
        updateStatus(self.jobid,'initialized','queued')
    """
def main():
    """Build `nflows` example flows, wrap each in a single-task Firework, and
    add them to a freshly reset LaunchPad. Returns 0 on completion."""
    # set up the LaunchPad and reset it
    launchpad = LaunchPad()
    launchpad.reset('', require_password=False)
    # Build the flow
    nflows = 2
    for i in range(nflows):
        flow = build_flow("flow_" + str(i))
        # Persist the flow to disk so the Firework's task can reload it.
        flow.build_and_pickle_dump()
        # create the Firework consisting of a single task
        firework = Firework(FireTaskWithFlow(flow=flow))
        # store workflow
        launchpad.add_wf(firework)
    #launch it locally
    #launch_rocket(launchpad)
    return 0
def main(sequencing_directory, library_prefix, num_libraries, raw_data_dir):
    """Queue one STAR-alignment Firework per library directory.

    Args:
        sequencing_directory (str): parent directory holding the libraries.
        library_prefix (str): folder-name prefix; libraries are
            <prefix>1 .. <prefix>N.
        num_libraries (int): number of library directories to process.
        raw_data_dir (str): unused here; kept for interface compatibility.
    """
    # Context manager closes the yaml handle (the original leaked it).
    with open("my_launchpad.yaml") as f:
        lpad = LaunchPad(**yaml.load(f))
    workflow_fireworks = []
    workflow_dependencies = collections.defaultdict(list)
    # range works on both Python 2 and 3 (xrange is Python-2-only).
    library_dirs = [
        os.path.join(sequencing_directory, library_prefix + str(i + 1))
        for i in range(num_libraries)
    ]
    subdirs = ['unzipped', 'trimmed', 'aligned', 'aligned_star', 'quant_rsem',
               'bammed', 'sorted', 'counted']
    for library_dir in library_dirs:
        seq_functions.make_directories(library_dir, subdirs)
        name = "AlignSTAR_%s" % os.path.basename(library_dir)
        fw_count = Firework(
            [
                AlignSTARTask(library_path=library_dir,
                              trimmed_name="trimmed",
                              aligned_name="aligned_star/",
                              quant_name="quant_rsem/")
            ],
            name=name,
            spec={"_queueadapter": {"job_name": name, "ntasks_per_node": 8}},
        )
        workflow_fireworks.append(fw_count)
    # workflow_dependencies stays empty: the alignment fireworks are independent.
    lpad.add_wf(Workflow(workflow_fireworks, links_dict=workflow_dependencies))
# Benchmark driver chunk: runs the perovskite optimization n_runs times in
# separate databases, launching rockets until n_cands candidates (yi == 30.0)
# are found. Relies on names defined elsewhere in the file: predictor,
# TESTDB_NAME, space_noex, wf_creator, launch_rocket, MongoClient.
# Dotted import path to the descriptor ("z") function used by the optimizer.
get_z = 'turboworks_examples.test_perovskites.get_z'
n_cands = 20  # stop each run once this many hits are found
n_runs = 20   # independent repetitions, each in its own database
filename = 'perovskites_{}_{}_{}cands_{}runs.p'.format(
    predictor, TESTDB_NAME, n_cands, n_runs)
Y = []
for i in range(n_runs):
    # Per-run database name, e.g. <TESTDB_NAME>_0, <TESTDB_NAME>_1, ...
    rundb = TESTDB_NAME + "_{}".format(i)
    conn = MongoClient('localhost', 27017)
    db = getattr(conn, rundb)
    collection = db.test_perovskites
    filedir = os.path.dirname(os.path.realpath(__file__))
    launchpad = LaunchPad(name=rundb)
    launchpad.reset(password=None, require_password=False)
    launchpad.add_wf(
        wf_creator(random.choice(space_noex), predictor, get_z, launchpad,
                   filedir + '/space_gs_cat_included.p',
                   chemical_rules=True))
    y = []
    cands = 0
    # Launch one rocket at a time, recording the cumulative hit count after
    # each launch, until n_cands candidates with yi == 30.0 exist.
    while cands != n_cands:
        launch_rocket(launchpad)
        cands = collection.find({'yi': 30.0}).count()
        y.append(cands)
    # NOTE(review): `y` is collected per run and `Y` accumulates nothing in
    # the visible chunk -- the aggregation presumably follows below; the
    # chunk is cut off here.
"translation_supply": TRANSLATION_SUPPLY, } for key, value in metadata.iteritems(): if not isinstance(value, basestring): continue filepath.write_file(os.path.join(METADATA_DIRECTORY, key), value) with open(os.path.join(METADATA_DIRECTORY, constants.SERIALIZED_METADATA_FILE), "wb") as f: cPickle.dump(metadata, f, cPickle.HIGHEST_PROTOCOL) #### Create workflow # Create launchpad with open(LAUNCHPAD_FILE) as f: lpad = LaunchPad(**yaml.safe_load(f)) # Store list of FireWorks wf_fws = [] # Store links defining parent/child dependency relationships of FireWorks wf_links = collections.defaultdict(list) ### Initialize KB filename_raw_data = constants.SERIALIZED_RAW_DATA filename_sim_data_modified = constants.SERIALIZED_SIM_DATA_MODIFIED fw_name = "InitRawData"