def get_worker(): host = database.get_host() idb = database.isolated_database() idb.download() for worker in idb.workers: if worker.host == host: return worker print "Error: get_worker: %s is an unknown working host. Break." % host exit(1)
def get_worker():
    """Return the isolated-database worker entry matching the current host.

    NOTE(review): duplicate of the get_worker defined earlier in this file;
    consider consolidating into a single shared helper.
    Exits the process with status 1 when the host is not registered.
    """
    host=database.get_host()
    idb=database.isolated_database()
    # fetch the up-to-date worker list from the remote database
    idb.download()
    for worker in idb.workers:
        if worker.host==host:
            return worker
    # no worker entry matches this machine -- abort the program
    print "Error: get_worker: %s is an unknown working host. Break." % host
    exit(1)
def main(): parser = argparse.ArgumentParser(description='Update database for energy shifts in EuO and substrate on remote database') parser.add_argument('input', nargs='+', help='Folders containing results of isolated material runs or folders containing subfolders with results') parser.add_argument('--dry', action='store_true', help='Simulate updating of database') parser.add_argument('--archive', action='store_true', help='Archive all results') args = parser.parse_args() # get host host=database.get_host() idb=database.isolated_database() idb.download() found=False serverdir='' clientdir='' for worker in idb.workers: if host==worker.host: serverdir=worker.serverdir clientdir=worker.clientdir found=True if not found: print "Error: Isolated database remote: Unknow host: %s" % host print "Break." exit(1) # get path on steinschal-tradigist inputs=[] for ipath in args.input: # get absolute path apath=os.path.abspath(ipath) if not apath.startswith(clientdir): print "Error: Isolated database remote: %s is an unknown run path. Break." % apath exit(1) inputs.append(apath.replace(clientdir, serverdir, 1)) cmd='/users/stollenw/projects/euo/tools/euoscripts/isolated_update.py' for inp in inputs: cmd+=" %s" % inp if args.dry: cmd+=" --dry" if args.archive: cmd+=" --archive" try: rcmd=['ssh', 'steinschal-tradigist.th.physik.uni-bonn.de', '%s' % cmd] subprocess.call(rcmd) except: print "Unable to update remote database. Break." exit(1)
def __init__(self, np, material, N=5, M=None, ni=0.01, ncr=None, dW=None, output=None, input=None, initial_input=None, inputFlag=True, isoDeltaFlag=True, updatedbFlag=True, iteration_parameter=None, get_default_iteration_parameter=None, check_database=False, source=None, input_system_name=None, log='run', verbose=True, email='*****@*****.**', mailcmd='mailx -s'):
    """Collect run parameters and database handles for an euo run.

    np: number of nodes; material: system name; N/M: layer counts of the
    left/right system; ni/ncr: charge carriers of left/right system;
    dW: workfunction difference.  The remaining arguments control input
    search, database updates and email notification.  Downloads the bulk,
    isolated and heterostructure databases as a side effect.
    """
    # number of nodes
    self.np = np
    # material name
    self.material = material
    # number of left layers
    self.N = N
    # number of right layers
    self.M = M
    # number of charge carriers in the left system
    self.ni = ni
    # number of charge carriers in the right system
    self.ncr = ncr
    # workfunction difference between left and right system
    self.dW = dW
    # initial input folder
    self.initial_input = initial_input
    # search automatically for suitable input
    self.inputFlag = inputFlag
    # add isolated delta values automatically
    self.isoDeltaFlag = isoDeltaFlag
    # update databases after successful runs automatically
    self.updatedbFlag = updatedbFlag
    # additional parameter (like max2, wr1, etc.)
    # user defined parameter (if not defined add nothing)
    # 'is not None' instead of '!= None' (PEP 8 identity comparison)
    if iteration_parameter is not None:
        self.iteration_parameter = iteration_parameter
    else:
        self.iteration_parameter = ''
    # function which gives the default iteration parameter depending on the material
    # (only relevant for automatic isodelta runs)
    if get_default_iteration_parameter is not None:
        self.get_default_iteration_parameter = get_default_iteration_parameter
    else:
        self.get_default_iteration_parameter = database.get_iteration_parameter
    # check database before a run, if it exists don't run again
    self.check_database = check_database
    # source for searching suitable input ('local', 'remote' or None(=both))
    self.source = source
    # alternative system name which can serve as an input (only if source!=local)
    self.input_system_name = input_system_name
    # logfile name
    self.log = log
    # email address
    self.email = email
    # send intermediate notifications
    self.verbose = verbose
    # email command
    self.mailcmd = mailcmd
    # keep an instance of the system parameter class for later use
    self.sp = system_parameter.system_parameter()
    # keep an instance of bulk database for later use
    self.bdb = database.bulk_database()
    self.bdb.download()
    # keep an instance of isolated database for later use
    self.idb = database.isolated_database()
    self.idb.download()
    # keep an instance of heterostructure database for later use
    self.hdb = database.heterostructure_database()
    self.hdb.download()
    # get material class
    self.material_class = self.sp.get_system_by_name(self.material).material_class
    # get mpicmd
    self.mpicmd = get_worker().mpicmd
    # get name which defines the system
    self.name = None
    if self.material_class == 'bulk':
        self.name = self.bdb.get_output(self.material, self.ni)
    elif self.material_class == 'isolated':
        self.name = self.idb.get_output(self.material, self.N, self.ni)
    else:
        self.name = self.hdb.get_output(self.material, self.N, self.M, self.ni, self.ncr, self.dW)
    # set top output folder to current working directory by default
    if output is None:
        if self.material_class == 'bulk':
            self.output = self.bdb.get_output(self.material, self.ni)
        elif self.material_class == 'isolated':
            self.output = self.idb.get_output(self.material, self.N, self.ni)
        else:
            self.output = self.hdb.get_output(self.material, self.N, self.M, self.ni, self.ncr, self.dW)
    else:
        self.output = output
    # set top input search folder to output folder by default
    if input is None:
        self.input = self.output
    else:
        self.input = input
    # host
    self.host = database.get_host()
def main(): parser = argparse.ArgumentParser( description='Update database for bulk results') parser.add_argument( 'input', nargs='*', help= 'Folders containing results of bulk runs or folders containing subfolders with results' ) parser.add_argument('--dry', action='store_true', help='Simulate updating of database') parser.add_argument('--archive', action='store_true', help='Archive all results') args = parser.parse_args() # get host host = database.get_host() bdb = database.bulk_database() bdb.download() found = False serverdir = '' clientdir = '' for worker in bdb.workers: if host == worker.host: serverdir = worker.serverdir clientdir = worker.clientdir found = True if not found: print "Error: Bulk database remote: Unknow host: %s" % host print "Break." exit(1) # get path on steinschal-tradigist inputs = [] for ipath in args.input: # get absolute path apath = os.path.abspath(ipath) if not apath.startswith(clientdir): print "Error: Bulk database remote: %s is an unknown run path. Break." % apath exit(1) inputs.append(apath.replace(clientdir, serverdir, 1)) cmd = '/users/stollenw/projects/euo/tools/euoscripts/bulk_update.py' for inp in inputs: cmd += " %s" % inp if args.dry: cmd += " --dry" if args.archive: cmd += " --archive" try: rcmd = [ 'ssh', 'steinschal-tradigist.th.physik.uni-bonn.de', '%s' % cmd ] subprocess.call(rcmd) except: print "Unable to update remote database. Break." exit(1)
def __init__(self, np, material, N=5, M=None, ni=0.01, ncr=None, dW=None, output=None, input=None, initial_input=None, inputFlag=True, isoDeltaFlag=True, updatedbFlag=True, iteration_parameter=None, get_default_iteration_parameter=None, check_database=False, source=None, input_system_name=None, log='run', verbose=True, email='*****@*****.**', mailcmd='mailx -s'):
    """Collect run parameters and database handles for an euo run.

    NOTE(review): duplicate of the __init__ defined earlier in this file;
    consider consolidating.  np: number of nodes; material: system name;
    N/M: layer counts of the left/right system; ni/ncr: charge carriers of
    left/right system; dW: workfunction difference.  Downloads the bulk,
    isolated and heterostructure databases as a side effect.
    """
    # number of nodes
    self.np=np
    # material name
    self.material=material
    # number of left layers
    self.N=N
    # number of right layers
    self.M=M
    # number of charge carriers in the left system
    self.ni=ni
    # number of charge carriers in the right system
    self.ncr=ncr
    # workfunction difference between left and right system
    self.dW=dW
    # initial input folder
    self.initial_input=initial_input
    # search automatically for suitable input
    self.inputFlag=inputFlag
    # add isolated delta values automatically
    self.isoDeltaFlag=isoDeltaFlag
    # update databases after successful runs automatically
    self.updatedbFlag=updatedbFlag
    # additional parameter (like max2, wr1, etc.)
    # user defined parameter (if not defined add nothing)
    if iteration_parameter!=None:
        self.iteration_parameter=iteration_parameter
    else:
        self.iteration_parameter=''
    # function which gives the default iteration parameter depending on the material
    # (only relevant for automatic isodelta runs)
    if get_default_iteration_parameter!=None:
        self.get_default_iteration_parameter=get_default_iteration_parameter
    else:
        self.get_default_iteration_parameter=database.get_iteration_parameter
    # check database before a run, if it exists don't run again
    self.check_database=check_database
    # source for searching suitable input ('local', 'remote' or None(=both))
    self.source=source
    # alternative system name which can serve as an input (only if source!=local)
    self.input_system_name=input_system_name
    # logfile name
    self.log=log
    # email address
    self.email=email
    # send intermediate notifications
    self.verbose=verbose
    # email command
    self.mailcmd=mailcmd
    # keep an instance of the system parameter class for later use
    self.sp=system_parameter.system_parameter()
    # keep an instance of bulk database for later use
    self.bdb=database.bulk_database()
    self.bdb.download()
    # keep an instance of isolated database for later use
    self.idb=database.isolated_database()
    self.idb.download()
    # keep an instance of heterostructure database for later use
    self.hdb=database.heterostructure_database()
    self.hdb.download()
    # get material class
    self.material_class=self.sp.get_system_by_name(self.material).material_class
    # get mpicmd
    self.mpicmd=get_worker().mpicmd
    # get name which defines the system
    self.name=None
    if self.material_class=='bulk':
        self.name=self.bdb.get_output(self.material, self.ni)
    elif self.material_class=='isolated':
        self.name=self.idb.get_output(self.material, self.N, self.ni)
    else:
        self.name=self.hdb.get_output(self.material, self.N, self.M, self.ni, self.ncr, self.dW)
    # set top output folder to current working directory by default
    if output==None:
        if self.material_class=='bulk':
            self.output=self.bdb.get_output(self.material, self.ni)
        elif self.material_class=='isolated':
            self.output=self.idb.get_output(self.material, self.N, self.ni)
        else:
            self.output=self.hdb.get_output(self.material, self.N, self.M, self.ni, self.ncr, self.dW)
    else:
        self.output=output
    # set top input search folder to output folder by default
    if input==None:
        self.input=self.output
    else:
        self.input=input
    # host
    self.host=database.get_host()
def main(): parser = argparse.ArgumentParser(description='Calculate conductivity out of euo program results') parser.add_argument('-d', '--database', help='specify database') parser.add_argument('-s', '--dataset', nargs='+', help='specify dataset without temperature') parser.add_argument('-n', '--np', default=1, help='Number of processes for parallel use', type=int) parser.add_argument('-i', '--input', help='Input folder containing result of a single run.') parser.add_argument('-o', '--output', default=None, help='Output Folder for the single run results (optional, default=input)') parser.add_argument('--no_isodelta_db', action='store_false', help='No fetching of more accurate isodeltas in the case of heterostructures') parser.add_argument('--no_overwrite', action="store_true", help='recalculate conductivity, even if it is present in the database') #parser.add_argument('--force_download', action="store_true", help='Download *all* results from database, even if they exist in the current folder') args = parser.parse_args() # remote or steinschal-tradigist host=database.get_host() # get mpi run command depening on host mpicmd=get_worker().mpicmd # add current working directory to system path sys.path.append(os.getcwd()) if not args.input==None: getcond(args.input, np=args.np, isodeltadb=args.no_isodelta_db, outputFolder=args.output) else: if not args.database in ('bulk', 'isolated', 'hetero'): print "Database must be 'bulk', 'isolated' or 'hetero'" exit(1) db=None corenames=None isodeltadb=False if args.database=='bulk': db=database.bulk_database() corenames=('material', 'ni', 'T') filenames=("cond.dat", "resist.dat") top_result_folder = "/users/stollenw/projects/euo/results/bulk/" elif args.database=='isolated': db=database.isolated_database() corenames=('material', 'N', 'ni', 'T') top_result_folder = "/users/stollenw/projects/euo/results/isolated/" filenames=("cond.dat", "resist.dat", "cond_perp.dat", "resist_perp.dat", "cond_perp_matrix.dat", 
"resist_perp_matrix.dat") else: db=database.heterostructure_database() corenames=('material', 'N', 'M', 'ni', 'ncr', 'dW', 'T') top_result_folder = "/users/stollenw/projects/euo/results/heterostructure/" filenames=("cond.dat", "resist.dat", "cond_perp.dat", "resist_perp.dat", "cond_perp_matrix.dat", "resist_perp_matrix.dat") isodeltadb=args.no_isodelta_db db.download() # get filtered data, i.e. reduce according to args.dataset (if not given, only sort) filtered_data=database.filtrate(db.data, corenames, args.dataset) for fd in filtered_data: print fd result_folder = fd[-1] + "/" # check if conductivity calculation was already performed print "check existence ..." exists=check_file_exists(host, '%s/results/%s' % (result_folder, filenames[0])) # calculate conductivity if necessary or forced if not exists or not args.no_overwrite: print "calculate conductivity ..." getcondremote(host, db, filenames, result_folder, mpicmd, args.np, isodeltadb)