Exemplo n.º 1
0
def database_exists(sp):
    """Return True if a database entry exists for the run described by *sp*.

    Dispatches on the material class of the system ('bulk', 'isolated' or
    heterostructure otherwise), downloads the matching remote database and
    queries it for the system's defining parameters.
    """
    # look the class up once instead of re-evaluating it per branch
    material_class = sp.get_system().material_class
    if material_class == 'bulk':
        bdb = database.bulk_database()
        bdb.download()
        # bool() keeps the original strict True/False return contract
        return bool(bdb.exists(sp.get_system().name, sp.concentration,
                               sp.temperature))
    elif material_class == 'isolated':
        idb = database.isolated_database()
        idb.download()
        return bool(idb.exists(sp.get_system().name, sp.N, sp.concentration,
                               sp.temperature))
    else:
        hdb = database.heterostructure_database()
        hdb.download()
        return bool(hdb.exists(sp.get_system().name, sp.N, sp.N0,
                               sp.concentration, sp.n_cr, sp.Delta_W,
                               sp.temperature))
Exemplo n.º 2
0
def main():
	parser = argparse.ArgumentParser(description='Update database for heterostructure runs')
	parser.add_argument('input', nargs=2, help='Two database files')
  	parser.add_argument('-d', '--database', default='hetero', help='Database type: bulk, isolated or hetero (default)')
  	parser.add_argument('-r', '--reverse', action='store_true', help='reverse input file order')

	args = parser.parse_args()
	
	#print args.overwrite
	#print args.input
	#print args.database
	if args.reverse:
		args.input.reverse()	

	# initialize databases
	d1=None
	d2=None
	length=None
	if args.database=='hetero':
		d1=database.heterostructure_database()
		d2=database.heterostructure_database()
		length=6
	elif args.database=='isolated':
		d1=database.isolated_database()
		d2=database.isolated_database()
		length=3
	elif args.database=='bulk':
		d1=database.bulk_database()
		d2=database.bulk_database()
		length=2
	else:
		print "Database type must be 'bulk', 'isolated' or 'hetero' (default). Break"
		exit(1)
	d1.download("[email protected]:%s" % args.input[0])
	d2.download("[email protected]:%s" % args.input[1])

	print "Datasets present in %s but not in %s:" % (args.input[0], args.input[1])
	reduced_data2=[dset2[:length] for dset2 in d2.data]
	for dset1 in d1.data:
		if not dset1[:length] in reduced_data2:
			print dset1[:-1]
Exemplo n.º 3
0
def main():
	dataset_help="""Specify dataset 
	
e.g. "Metal-Metal-Heterostructure 5 9 0.01 0.01 0.125" 
(for material, N, M, ni, ncr and dW). 

You may use "all" as a placeholder or do not specify 
the last values e.g. "all 5 all 0.01

"""

	parser = argparse.ArgumentParser(description='Update database for heterostructure runs')
	parser.add_argument('input', nargs='*', help='Folders containing results of heterostructure material runs or folders containing subfolders with results')
  	parser.add_argument('-d', '--database', default='/users/stollenw/projects/euo/database/hetero.db', help='Database file name')
	parser.add_argument('-s', '--dataset', nargs='*', help=dataset_help)
  	parser.add_argument('--overwrite', action='store_true', help='Overwrite database')
  	parser.add_argument('--archive', action='store_true', help='Archive all results')
  	parser.add_argument('--archive_destination', default='/users/stollenw/projects/euo/results/heterostructure/', help='Archive folder')
  	parser.add_argument('--dry', action='store_true', help='Simulate updating of database')

	args = parser.parse_args()
	
	#print args.overwrite
	#print args.input
	#print args.database

	# initialize database
	t=database.heterostructure_database()
	# read in database if it already exists and overwrite flag is not given
	if os.path.exists(args.database) and not args.overwrite:
		t.read(args.database)

	if args.dataset==None:
		t.fill(args.input, args.overwrite)
		if not args.dry:
			t.write(args.database)
			if args.archive:
				t.archive(args.archive_destination)
			else:
				for iput in args.input:
					t.archive(args.archive_destination, None, os.path.abspath(iput))
		else:
			print "Archive folder would be: ", args.archive_destination
	else:
		t.archive(args.archive_destination, args.dataset)
Exemplo n.º 4
0
def database_exists(sp):
	"""Return True if a database entry exists for the run described by *sp*.

	Dispatches on the material class of the system ('bulk', 'isolated' or
	heterostructure otherwise), downloads the matching remote database and
	queries it for the system's defining parameters.
	"""
	# look the class up once instead of re-evaluating it per branch
	material_class = sp.get_system().material_class
	if material_class == 'bulk':
		bdb = database.bulk_database()
		bdb.download()
		# bool() keeps the original strict True/False return contract
		return bool(bdb.exists(sp.get_system().name, sp.concentration, sp.temperature))
	elif material_class == 'isolated':
		idb = database.isolated_database()
		idb.download()
		return bool(idb.exists(sp.get_system().name, sp.N, sp.concentration, sp.temperature))
	else:
		hdb = database.heterostructure_database()
		hdb.download()
		return bool(hdb.exists(sp.get_system().name, sp.N, sp.N0, sp.concentration, sp.n_cr, sp.Delta_W, sp.temperature))
Exemplo n.º 5
0
def main():
    """Analyse euo program results.

    Reads a keyword from the command line, filters the selected database
    (bulk, isolated or heterostructure) down to the requested dataset and
    writes temperature-dependent observables (conductivity, resistivity,
    magnetisation, Curie temperature, ...) to data files under the output
    folder, or just prints the available datasets.
    """
    parser = argparse.ArgumentParser(
        description='Analyse euo program results',
        formatter_class=argparse.RawTextHelpFormatter)
    keyword_help = """Calculate the temperature dependent 
quantity specified by one of the following keywords

print
print full
tc

occNum_c		(for bulk)
dopant_activation	(for bulk)
totalmag		(for bulk)
cond			(for bulk)
resist			(for bulk)
	
avmag			(for isolated and heterostructures)
cond_para		(for isolated and heterostructures)
resist_para		(for isolated and heterostructures)
cond_perp		(for isolated and heterostructures)
resist_perp 		(for isolated and heterostructures)
isodelta 		(energy shift (-mu) for isolated systems)

"""

    dataset_help = """Specify dataset 
	
e.g. "Metal-Metal-Heterostructure 5 9 0.01 0.01 0.125" 
(for material, N, M, ni, ncr and dW). 

You may use "all" as a placeholder or do not specify 
the last values e.g. "all 5 all 0.01

"""

    parser.add_argument('keyword', help=keyword_help)
    parser.add_argument(
        '-d',
        '--database',
        help='Type of database: "bulk", "isolated" or "hetero"')
    parser.add_argument('-s', '--dataset', nargs='*', help=dataset_help)
    parser.add_argument('-o',
                        '--output',
                        default='/users/stollenw/projects/euo/analysis/',
                        help='Output folder (optional)')
    parser.add_argument('--dbpath', help='Path to database file (optional)')
    parser.add_argument('--resultpath',
                        default='/users/stollenw/projects/euo/results/',
                        help='Path to results (optional)')
    parser.add_argument(
        '--temperatures',
        nargs='*',
        default=None,
        help='Tempertures for tc search (optional, only for tc)',
        type=float)
    parser.add_argument(
        '--tsteps',
        nargs='*',
        default=None,
        help='Temperture steps for tc search (optional, only for tc)',
        type=float)
    parser.add_argument(
        '--dM',
        default=None,
        help='Magnetisation resolution for tc search (optional, only for tc)',
        type=float)
    parser.add_argument(
        '--layer',
        default=0,
        help='Layer to calculate parallel conductivity/resistivity in',
        type=int)
    parser.add_argument(
        '--layerx',
        help='First layer for perpendicular conductivity/resistivity',
        type=int)
    parser.add_argument(
        '--layery',
        help='First layer for perpendicular conductivity/resistivity',
        type=int)

    args = parser.parse_args()

    if not args.database in ('bulk', 'isolated', 'hetero'):
        parser.print_help()
        exit(0)

    # allowed keywords
    print_keywords = ['print', 'printfull']
    simple_result_keywords = None
    sophisticated_result_keywords = None
    if args.database == 'bulk':
        simple_result_keywords = ['cond', 'resist', 'totalmag']
        sophisticated_result_keywords = ['tc', 'dopant_activation', 'occNum_c']
    elif args.database == 'isolated' or args.database == 'hetero':
        simple_result_keywords = ['avmag']
        sophisticated_result_keywords = [
            'cond_para', 'resist_para', 'cond_perp', 'resist_perp', 'isodelta',
            'tc'
        ]

    # keywords that produce results
    result_keywords = simple_result_keywords + sophisticated_result_keywords
    # all keywords (including print keywords)
    allowed_keywords = simple_result_keywords + sophisticated_result_keywords + print_keywords

    # check if valid keyword was given
    if not args.keyword in allowed_keywords:
        parser.print_help()
        print "Allowed keywords are:"
        for ak in allowed_keywords:
            print ak
        exit(0)

    # set output
    output = args.output

    # per-database configuration: output subfolder, defining column names
    # and the 'special' observable shown by the print keywords
    db = None
    corenames = None
    special = None
    subResultFolder = None
    if args.database == 'bulk':
        db = database.bulk_database()
        output = output + 'bulk/'
        subResultFolder = 'bulk/'
        corenames = ('material', 'ni', 'T')
        special = 'mag'
    elif args.database == 'isolated':
        db = database.isolated_database()
        output = output + 'isolated/'
        subResultFolder = 'isolated/'
        corenames = ('material', 'N', 'ni', 'T')
        special = 'isodelta'
    else:
        db = database.heterostructure_database()
        output = output + 'hetero/'
        subResultFolder = 'heterostructure/'
        corenames = ('material', 'N', 'M', 'ni', 'ncr', 'dW', 'T')
        special = 'avmag'

    if args.dbpath == None:
        db.download()
    else:
        db.download("[email protected]:%s" %
                    args.dbpath)

    resultFolder = args.resultpath

    # get filtered data, i.e. reduce to defining properties without temperature
    filtered_data = database.filtrate(db.data, corenames, args.dataset,
                                      len(corenames) - 1)

    # lower threshold for displaying results
    min_result_number = 1
    # extract conductivity or other observables
    if args.keyword in result_keywords:
        # temperatrue sweep only makes sense if there are at least two temperatures
        min_result_number = 2
        # create folder if necessary
        suboutput = output + "/data/%s/" % args.keyword
        if not os.path.exists(suboutput):
            os.makedirs(suboutput)

    # in the Curie temperature case, create single file for all datasets
    # tcd holds the name of the free ('all') attribute, tci its column index
    outfile = ''
    tcd = ''
    tci = 0
    if args.keyword == 'tc' or args.keyword == 'dopant_activation' or args.keyword == 'occNum_c':
        if args.dataset == None:
            print "Dataset needed for Curie temperature / dopant activation at T=5K / occNum at T=5K"
            exit(1)
        # check if number single attribute is filterd out
        if (args.dataset.count('all') +
            (len(corenames) - 1 - len(args.dataset)) > 1):
            print "Dataset has to many degrees of freedom."
            exit(1)
        tcc = []
        i = 0
        for d, n in zip(args.dataset, corenames):
            if d != 'all':
                if n == 'material':
                    tcc.append(d)
                elif n == 'N' or n == 'M':
                    tcc.append(n + "%03i" % int(d))
                else:
                    tcc.append(n + "%06.4f" % float(d))
            else:
                tcd = n
                tci = i
            i = i + 1
        # no 'all' placeholder given: sweep the last defining attribute before T
        if (tcd == ''):
            tcd = corenames[-2]
            tci = len(corenames) - 2
        tcname = '_'.join(tcc)
        outfile = "%s/%s_%s.dat" % (suboutput, args.keyword, tcname)
        #remove file if it already exists
        f = open(outfile, 'w')
        if args.keyword == 'tc':
            f.write("# %s\tCurie temperature Tc\tAccuracy of Tc\n" % tcd)
        elif args.keyword == 'dopant_activation':
            f.write("# %s\tdopant activation (n_c/n_i)\n" % tcd)
        elif args.keyword == 'occNum_c':
            f.write("# %s\tConduction band occupation number (n_c)\n" % tcd)
        f.close()

    # iterate through database
    for fd in filtered_data:
        # defining name
        material_folder = db.get_output(*fd)
        namestr = material_folder.rstrip('/')
        # get all datasets corresponding to fd (different temperatures)
        temperature_datasets = database.filtrate(db.data, corenames, fd)
        if args.keyword in simple_result_keywords:
            # extract data from relevant folders by concatenating the
            # per-temperature result files into one output file
            cmd = 'cat '
            for td in temperature_datasets:
                temperature_folder = db.get_temp_output(td[len(corenames) - 1])
                #cmd=cmd + "%s/results/%s.dat " % (fd[-1], args.keyword)
                cmd = cmd + "%s/%s/%s/%s/results/%s.dat " % (
                    resultFolder, subResultFolder, material_folder,
                    temperature_folder, args.keyword)
            cmd = cmd + " > %s/%s_%s.dat" % (suboutput, args.keyword, namestr)
            subprocess.call(cmd, shell=True)

        elif args.keyword in sophisticated_result_keywords:
            if args.keyword == 'cond_para':
                key = args.keyword
                if (args.layer != 0):
                    key = "%s_layer%03i" % (args.keyword, args.layer)
                outfile = "%s/%s_%s.dat" % (suboutput, key, namestr)
                f = open(outfile, 'w')
                for td in temperature_datasets:
                    temperature_folder = db.get_temp_output(td[len(corenames) -
                                                               1])
                    filename = "%s/%s/%s/%s/results/%s.dat" % (
                        resultFolder, subResultFolder, material_folder,
                        temperature_folder, 'cond')
                    value = float(read(filename, line=args.layer)[1])
                    temp = td[len(corenames) - 1]
                    f.write("%0.17e\t%0.17e\n" % (temp, value))
                f.close()

            if args.keyword == 'resist_para':
                key = args.keyword
                if (args.layer != 0):
                    key = "%s_layer%03i" % (args.keyword, args.layer)
                outfile = "%s/%s_%s.dat" % (suboutput, key, namestr)
                f = open(outfile, 'w')
                for td in temperature_datasets:
                    temperature_folder = db.get_temp_output(td[len(corenames) -
                                                               1])
                    filename = "%s/%s/%s/%s/results/%s.dat" % (
                        resultFolder, subResultFolder, material_folder,
                        temperature_folder, 'resist')
                    value = float(read(filename, line=args.layer)[1])
                    temp = td[len(corenames) - 1]
                    f.write("%0.17e\t%0.17e\n" % (temp, value))
                f.close()

            if args.keyword == 'cond_perp':
                # without explicit layers: sum the whole conductivity matrix;
                # with layers: pick a single matrix element
                if args.layerx == None or args.layery == None:
                    outfile = "%s/%s_%s.dat" % (suboutput, args.keyword,
                                                namestr)
                    f = open(outfile, 'w')
                    for td in temperature_datasets:
                        temperature_folder = db.get_temp_output(
                            td[len(corenames) - 1])
                        filename = "%s/%s/%s/%s/results/%s.dat" % (
                            resultFolder, subResultFolder, material_folder,
                            temperature_folder, 'cond_perp_matrix')
                        # read in conductivity matrix
                        cmat = np.loadtxt(filename)
                        # sum over all entries
                        cond_perp = np.sum(cmat)
                        temp = td[len(corenames) - 1]
                        f.write("%0.17e\t%0.17e\n" % (temp, cond_perp))
                    f.close()
                else:
                    outfile = "%s/%s_%s_%03i_%03i.dat" % (
                        suboutput, args.keyword, namestr, args.layerx,
                        args.layery)
                    f = open(outfile, 'w')
                    for td in temperature_datasets:
                        temperature_folder = db.get_temp_output(
                            td[len(corenames) - 1])
                        filename = "%s/%s/%s/%s/results/%s.dat" % (
                            resultFolder, subResultFolder, material_folder,
                            temperature_folder, 'cond_perp_matrix')
                        value = float(
                            read(filename, line=args.layerx)[args.layery])
                        temp = td[len(corenames) - 1]
                        f.write("%0.17e\t%0.17e\n" % (temp, value))
                    f.close()

            if args.keyword == 'resist_perp':
                # resistivity: invert the conductivity matrix (or take the
                # reciprocal of a single element when layers are given)
                if args.layerx == None or args.layery == None:
                    outfile = "%s/%s_%s.dat" % (suboutput, args.keyword,
                                                namestr)
                    f = open(outfile, 'w')
                    for td in temperature_datasets:
                        temperature_folder = db.get_temp_output(
                            td[len(corenames) - 1])
                        filename = "%s/%s/%s/%s/results/%s.dat" % (
                            resultFolder, subResultFolder, material_folder,
                            temperature_folder, 'cond_perp_matrix')
                        # read in conductivity matrix
                        cmat = np.loadtxt(filename)
                        # invert conductivity matrix
                        icmat = la.inv(cmat)
                        # sum over all entries
                        resist_perp = np.sum(icmat)
                        temp = td[len(corenames) - 1]
                        f.write("%0.17e\t%0.17e\n" % (temp, resist_perp))
                    f.close()
                else:
                    outfile = "%s/%s_%s_%03i_%03i.dat" % (
                        suboutput, args.keyword, namestr, args.layerx,
                        args.layery)
                    print outfile
                    f = open(outfile, 'w')
                    for td in temperature_datasets:
                        temperature_folder = db.get_temp_output(
                            td[len(corenames) - 1])
                        filename = "%s/%s/%s/%s/results/%s.dat" % (
                            resultFolder, subResultFolder, material_folder,
                            temperature_folder, 'cond_perp_matrix')
                        value = float(
                            read(filename, line=args.layerx)[args.layery])
                        temp = td[len(corenames) - 1]
                        f.write("%0.17e\t%0.17e\n" % (temp, 1.0 / value))
                    f.close()

            if args.keyword == 'isodelta':
                # energy shift is minus the chemical potential
                outfile = "%s/%s_%s.dat" % (suboutput, args.keyword, namestr)
                f = open(outfile, 'w')
                for td in temperature_datasets:
                    temperature_folder = db.get_temp_output(td[len(corenames) -
                                                               1])
                    filename = "%s/%s/%s/%s/results/%s.dat" % (
                        resultFolder, subResultFolder, material_folder,
                        temperature_folder, 'mu')
                    mu = float(read(filename, line=0)[0])
                    isodelta = -mu
                    temp = td[len(corenames) - 1]
                    f.write("%0.17e\t%0.17e\n" % (temp, isodelta))
                f.close()

            if args.keyword == 'dopant_activation' or args.keyword == 'occNum_c':
                # both keywords read the T=5K occupation number result
                folder = "%s/%s/%s/" % (resultFolder, subResultFolder,
                                        material_folder)
                filename = "%s/%s/results/%s.dat" % (
                    folder, db.get_temp_output(5.0), 'occNum_c')
                if not os.path.exists(filename):
                    print "Warning: Dataset (T=5K) %s=%f is not present. %s does not exist." % (
                        tcd, fd[tci], filename)
                else:
                    occNum_c = float(read(filename, line=0)[0])
                    f = open(outfile, 'a')
                    if args.keyword == 'dopant_activation':
                        f.write("%0.17e\t%0.17e\n" %
                                (fd[tci], occNum_c / fd[tci]))
                    else:
                        f.write("%0.17e\t%0.17e\n" % (fd[tci], occNum_c))
                    f.close()

            if args.keyword == 'tc':
                # get tc and error in tc
                folder = "%s/%s/%s/" % (resultFolder, subResultFolder,
                                        material_folder)
                filename = "%s/tc.dat" % folder
                success = True
                if not os.path.exists(filename):
                    success = False
                    print "Warning: Dataset %s=%f is not present. %s does not exist." % (
                        tcd, fd[tci], filename)
                    # interactively offer to compute the missing Tc run
                    answer = raw_input("Try to get? (Y/n)")
                    if answer != 'n':
                        success = gettc(args.database, db, folder,
                                        args.temperatures, args.tsteps,
                                        args.dM)
                        if not success:
                            print "Warning: Data for %s=%f is not present. -> Skip" % (
                                tcd, fd[tci])

                if success:
                    print "Add dataset: %s=%f" % (tcd, fd[tci])
                    g = open(filename, 'r')
                    tc = 0.0
                    dtc = 0.0
                    for l in g.readlines():
                        if not l.startswith('#'):
                            tc = float(l.split()[0])
                        elif l.startswith('# Temperature accuracy'):
                            dtc = float(l.partition('=')[2])
                    g.close()
                    # write tc data row
                    f = open(outfile, 'a')
                    f.write("%f\t%f\t%f\n" % (fd[tci], tc, dtc))
                    f.close()

        elif args.keyword == 'print':
            # NOTE(review): rows appear to end with (..., T, <special>, source),
            # matching the 'printfull' header below — confirm against db layout
            print namestr
            print 'Temperature\t%s' % special
            for td in temperature_datasets:
                print "%e\t%e" % (td[-3], td[-2])
            print

        elif args.keyword == 'printfull':
            print namestr
            print 'Temperature\t%s\tSource' % special
            for td in temperature_datasets:
                print "%e\t%e\t%s" % (td[-3], td[-2], td[-1])
            print
Exemplo n.º 6
0
parser.add_argument(
    "-d", "--database", default="/users/stollenw/projects/euo/database/hetero.db", help="Database file name"
)
parser.add_argument(
    "-p",
    "--plotfolder",
    default="/users/stollenw/projects/euo/database/analysis/hetero/avmag/",
    help="Database file name",
)
parser.add_argument("-s", "--short", action="store_true", help="Less output")
args = parser.parse_args()

# create the plot output folder if it does not exist yet
if not os.path.exists(args.plotfolder):
    os.makedirs(args.plotfolder)

# NOTE(review): this rebinds the name 'database' from the imported module to a
# heterostructure_database instance; the module is unreachable below this point.
database = database.heterostructure_database()
# read a local database file if one was given, otherwise fetch the default copy
if args.database != "/users/stollenw/projects/euo/database/hetero.db":
    database.read(args.database)
else:
    database.download()

# get columns of data and remove duplicates by converting to
# a set (no duplicates) and back to a list
material_list = list(set([row[0] for row in database.data]))
N_list = list(set([int(row[1]) for row in database.data]))
M_list = list(set([int(row[2]) for row in database.data]))
ni_list = list(set([float(row[3]) for row in database.data]))
ncr_list = list(set([float(row[4]) for row in database.data]))
dW_list = list(set([float(row[5]) for row in database.data]))

# sort data
Exemplo n.º 7
0
    def __init__(self,
                 np,
                 material,
                 N=5,
                 M=None,
                 ni=0.01,
                 ncr=None,
                 dW=None,
                 output=None,
                 input=None,
                 initial_input=None,
                 inputFlag=True,
                 isoDeltaFlag=True,
                 updatedbFlag=True,
                 iteration_parameter=None,
                 get_default_iteration_parameter=None,
                 check_database=False,
                 source=None,
                 input_system_name=None,
                 log='run',
                 verbose=True,
                 email='*****@*****.**',
                 mailcmd='mailx -s'):
        """Prepare a simulation run description.

        Stores the run parameters, downloads the bulk, isolated and
        heterostructure databases for later lookups (network side effect),
        resolves the material class of *material* via the system parameter
        table and derives the default output/input folders from the matching
        database naming scheme.  Individual parameters are documented by the
        inline comments below.
        """
        # number of nodes
        self.np = np
        # material name
        self.material = material
        # number of left layers
        self.N = N
        # number of right layers
        self.M = M
        # number of charge carriers in the left system
        self.ni = ni
        # number of charge carriers in the right system
        self.ncr = ncr
        # workfunction difference between left and right system
        self.dW = dW
        # initial input folder
        self.initial_input = initial_input
        # search automatically for suitable input
        self.inputFlag = inputFlag
        # add isolated delta values automatically
        self.isoDeltaFlag = isoDeltaFlag
        # update databases after succesfful runs automatically
        self.updatedbFlag = updatedbFlag
        # additional parameter (like max2, wr1, etc.)
        # user defined parameter (if not defined add nothing)
        if iteration_parameter != None:
            self.iteration_parameter = iteration_parameter
        else:
            self.iteration_parameter = ''

        # function which gives the default iteration parameter depending on the material
        # (only relevant for automatic isodelta runs)
        if get_default_iteration_parameter != None:
            self.get_default_iteration_parameter = get_default_iteration_parameter
        else:
            self.get_default_iteration_parameter = database.get_iteration_parameter

        # check database before a run, if it exists don't run again
        self.check_database = check_database
        # source for searching suitable input ('local', 'remote' or None(=both))
        self.source = source
        # alternative system name which can serve as an input (only if source!=local)
        self.input_system_name = input_system_name
        # logfile name
        self.log = log
        # email address
        self.email = email
        # send intermediate notifications
        self.verbose = verbose
        # email command
        self.mailcmd = mailcmd

        # keep an instance of the system parameter class for later use
        self.sp = system_parameter.system_parameter()
        # keep an instance of bulk database for later use
        self.bdb = database.bulk_database()
        self.bdb.download()
        # keep an instance of isolated database for later use
        self.idb = database.isolated_database()
        self.idb.download()
        # keep an instance of heterostructure database for later use
        self.hdb = database.heterostructure_database()
        self.hdb.download()

        # get material class
        self.material_class = self.sp.get_system_by_name(
            self.material).material_class

        # get mpicmd
        self.mpicmd = get_worker().mpicmd
        # get name which defines the system
        self.name = None
        if self.material_class == 'bulk':
            self.name = self.bdb.get_output(self.material, self.ni)
        elif self.material_class == 'isolated':
            self.name = self.idb.get_output(self.material, self.N, self.ni)
        else:
            self.name = self.hdb.get_output(self.material, self.N, self.M,
                                            self.ni, self.ncr, self.dW)
        # set top output folder to current working directory by default
        if output == None:
            if self.material_class == 'bulk':
                self.output = self.bdb.get_output(self.material, self.ni)
            elif self.material_class == 'isolated':
                self.output = self.idb.get_output(self.material, self.N,
                                                  self.ni)
            else:
                self.output = self.hdb.get_output(self.material, self.N,
                                                  self.M, self.ni, self.ncr,
                                                  self.dW)
        else:
            self.output = output

        # set top input search folder to output folder by default
        if input == None:
            self.input = self.output
        else:
            self.input = input
        # host
        self.host = database.get_host()
Exemplo n.º 8
0
	def __init__(self, np, material, N=5, M=None, ni=0.01, ncr=None, dW=None, output=None, input=None, initial_input=None, inputFlag=True, isoDeltaFlag=True, updatedbFlag=True, iteration_parameter=None, get_default_iteration_parameter=None, check_database=False, source=None, input_system_name=None, log='run', verbose=True, email='*****@*****.**', mailcmd='mailx -s'):
		"""Prepare a simulation run description.

		Stores the run parameters, downloads the bulk, isolated and
		heterostructure databases for later lookups (network side effect),
		resolves the material class of *material* via the system parameter
		table and derives the default output/input folders from the matching
		database naming scheme.  Individual parameters are documented by the
		inline comments below.
		"""
		# number of nodes
		self.np=np
		# material name
		self.material=material
		# number of left layers
		self.N=N
		# number of right layers
		self.M=M
		# number of charge carriers in the left system
		self.ni=ni
		# number of charge carriers in the right system
		self.ncr=ncr
		# workfunction difference between left and right system
		self.dW=dW
		# initial input folder
		self.initial_input=initial_input
		# search automatically for suitable input
		self.inputFlag=inputFlag
		# add isolated delta values automatically 
		self.isoDeltaFlag=isoDeltaFlag
		# update databases after succesfful runs automatically
		self.updatedbFlag=updatedbFlag
		# additional parameter (like max2, wr1, etc.)
		# user defined parameter (if not defined add nothing)
		if iteration_parameter!=None:
			self.iteration_parameter=iteration_parameter
		else:
			self.iteration_parameter=''

		# function which gives the default iteration parameter depending on the material 
		# (only relevant for automatic isodelta runs)
		if get_default_iteration_parameter!=None:
			self.get_default_iteration_parameter=get_default_iteration_parameter
		else:
			self.get_default_iteration_parameter=database.get_iteration_parameter

		# check database before a run, if it exists don't run again
		self.check_database=check_database
		# source for searching suitable input ('local', 'remote' or None(=both))
		self.source=source
		# alternative system name which can serve as an input (only if source!=local)
		self.input_system_name=input_system_name
		# logfile name
		self.log=log
		# email address
		self.email=email
		# send intermediate notifications
		self.verbose=verbose
		# email command
		self.mailcmd=mailcmd

		# keep an instance of the system parameter class for later use
		self.sp=system_parameter.system_parameter()
		# keep an instance of bulk database for later use
		self.bdb=database.bulk_database()
		self.bdb.download()
		# keep an instance of isolated database for later use
		self.idb=database.isolated_database()
		self.idb.download()
		# keep an instance of heterostructure database for later use
		self.hdb=database.heterostructure_database()
		self.hdb.download()

		# get material class
		self.material_class=self.sp.get_system_by_name(self.material).material_class

		# get mpicmd
		self.mpicmd=get_worker().mpicmd
		# get name which defines the system
		self.name=None
		if self.material_class=='bulk':
			self.name=self.bdb.get_output(self.material, self.ni)
		elif self.material_class=='isolated':
			self.name=self.idb.get_output(self.material, self.N, self.ni)
		else:
			self.name=self.hdb.get_output(self.material, self.N, self.M, self.ni, self.ncr, self.dW)
		# set top output folder to current working directory by default
		if output==None:
			if self.material_class=='bulk':
				self.output=self.bdb.get_output(self.material, self.ni)
			elif self.material_class=='isolated':
				self.output=self.idb.get_output(self.material, self.N, self.ni)
			else:
				self.output=self.hdb.get_output(self.material, self.N, self.M, self.ni, self.ncr, self.dW)
		else:
			self.output=output

		# set top input search folder to output folder by default
		if input==None:
			self.input=self.output
		else:
			self.input=input
		# host
		self.host=database.get_host()
Exemplo n.º 9
0
def main():
	"""Command-line entry point: analyse euo program results.

	Loads the requested database ('bulk', 'isolated' or 'hetero'),
	filters it down to the dataset given on the command line and then
	either prints the matching entries ('print'/'printfull') or writes
	temperature sweeps of the requested observable (conductivity,
	resistivity, magnetisation, isodelta, Curie temperature, ...) to
	.dat files below the output folder.
	"""
	parser = argparse.ArgumentParser(description='Analyse euo program results', formatter_class=argparse.RawTextHelpFormatter)
	keyword_help="""Calculate the temperature dependent 
quantity specified by one of the following keywords

print
print full
tc

occNum_c		(for bulk)
dopant_activation	(for bulk)
totalmag		(for bulk)
cond			(for bulk)
resist			(for bulk)
	
avmag			(for isolated and heterostructures)
cond_para		(for isolated and heterostructures)
resist_para		(for isolated and heterostructures)
cond_perp		(for isolated and heterostructures)
resist_perp 		(for isolated and heterostructures)
isodelta 		(energy shift (-mu) for isolated systems)

"""

	dataset_help="""Specify dataset 
	
e.g. "Metal-Metal-Heterostructure 5 9 0.01 0.01 0.125" 
(for material, N, M, ni, ncr and dW). 

You may use "all" as a placeholder or do not specify 
the last values e.g. "all 5 all 0.01

"""

	parser.add_argument('keyword', help=keyword_help)
	parser.add_argument('-d', '--database', help='Type of database: "bulk", "isolated" or "hetero"')
	parser.add_argument('-s', '--dataset', nargs='*', help=dataset_help)
	parser.add_argument('-o', '--output', default='/users/stollenw/projects/euo/analysis/', help='Output folder (optional)')
	parser.add_argument('--dbpath', help='Path to database file (optional)')
	parser.add_argument('--resultpath', default='/users/stollenw/projects/euo/results/', help='Path to results (optional)')
	parser.add_argument('--temperatures', nargs='*', default=None, help='Tempertures for tc search (optional, only for tc)', type=float)
	parser.add_argument('--tsteps', nargs='*', default=None, help='Temperture steps for tc search (optional, only for tc)', type=float)
	parser.add_argument('--dM', default=None, help='Magnetisation resolution for tc search (optional, only for tc)', type=float)
	parser.add_argument('--layer', default=0, help='Layer to calculate parallel conductivity/resistivity in', type=int)
	parser.add_argument('--layerx', help='First layer for perpendicular conductivity/resistivity', type=int)
	parser.add_argument('--layery', help='First layer for perpendicular conductivity/resistivity', type=int)
	
	args = parser.parse_args()

	# the database type is mandatory; show the usage message otherwise
	if not args.database in ('bulk', 'isolated', 'hetero'):
		parser.print_help()
		exit(0)

	# allowed keywords
	print_keywords=['print', 'printfull']
	simple_result_keywords=None
	sophisticated_result_keywords=None
	if args.database=='bulk':
		simple_result_keywords=['cond', 'resist', 'totalmag']
		sophisticated_result_keywords=['tc', 'dopant_activation', 'occNum_c']
	elif args.database=='isolated' or args.database=='hetero':
		simple_result_keywords=['avmag']
		sophisticated_result_keywords=['cond_para', 'resist_para', 'cond_perp', 'resist_perp', 'isodelta', 'tc']

	# keywords that produce results
	result_keywords=simple_result_keywords + sophisticated_result_keywords
	# all keywords (including print keywords)
	allowed_keywords=simple_result_keywords + sophisticated_result_keywords + print_keywords

	# check if valid keyword was given
	if not args.keyword in allowed_keywords:
		parser.print_help()
		print "Allowed keywords are:"
		for ak in allowed_keywords:
			print ak
		exit(0)
	
	# set output
	output=args.output
	

	# select database instance, output subfolder, defining column names and
	# the 'special' observable column according to the database type
	db=None
	corenames=None
	special=None
	subResultFolder=None
	if args.database=='bulk':
		db=database.bulk_database()	
		output=output+'bulk/'
		subResultFolder='bulk/'
		corenames=('material', 'ni', 'T')
		special='mag'
	elif args.database=='isolated':
		db=database.isolated_database()	
		output=output+'isolated/'
		subResultFolder='isolated/'
		corenames=('material', 'N', 'ni', 'T')
		special='isodelta'
	else:
		db=database.heterostructure_database()	
		output=output+'hetero/'
		subResultFolder='heterostructure/'
		corenames=('material', 'N', 'M', 'ni', 'ncr', 'dW', 'T')
		special='avmag'

	# fetch the database, either from the default location or from --dbpath
	if args.dbpath==None:
		db.download()
	else:
		db.download("[email protected]:%s" % args.dbpath)

	resultFolder=args.resultpath

	# get filtered data, i.e. reduce to defining properties without temperature
	filtered_data=database.filtrate(db.data, corenames, args.dataset, len(corenames)-1)

	# lower threshold for displaying results
	min_result_number=1
	# extract conductivity or other observables
	if args.keyword in result_keywords:
		# temperatrue sweep only makes sense if there are at least two temperatures
		min_result_number=2
		# create folder if necessary
		suboutput=output + "/data/%s/" % args.keyword
		if not os.path.exists(suboutput):
			os.makedirs(suboutput)
		
	# in the Curie temperature case, create single file for all datasets
	outfile=''
	tcd=''	# name of the free (swept) core parameter
	tci=0	# its index within corenames
	if args.keyword=='tc' or args.keyword=='dopant_activation' or args.keyword=='occNum_c':
		if args.dataset==None:
			print "Dataset needed for Curie temperature / dopant activation at T=5K / occNum at T=5K"
			exit(1)
		# check if number single attribute is filterd out
		if (args.dataset.count('all')+(len(corenames)-1-len(args.dataset))>1):
			print "Dataset has to many degrees of freedom."
			exit(1)
		# build a filename fragment from the fixed dataset values; the single
		# 'all' (or omitted trailing) entry becomes the swept parameter (tcd/tci)
		tcc=[]
		i=0
		for d,n in zip(args.dataset,corenames):
			if d!='all':
				if n=='material':
					tcc.append(d)
				elif n=='N' or n=='M':
					tcc.append(n + "%03i" % int(d))
				else:
					tcc.append(n + "%06.4f" % float(d))
			else:
				tcd=n
				tci=i
			i=i+1
		# if no 'all' was given, the last core parameter (before T) is swept
		if (tcd==''):
			tcd=corenames[-2]
			tci=len(corenames)-2
		tcname='_'.join(tcc)
		outfile="%s/%s_%s.dat" % (suboutput, args.keyword, tcname)
		#remove file if it already exists
		f=open(outfile, 'w')
		if args.keyword=='tc': 
			f.write("# %s\tCurie temperature Tc\tAccuracy of Tc\n" % tcd)
		elif args.keyword=='dopant_activation': 
			f.write("# %s\tdopant activation (n_c/n_i)\n" % tcd)
		elif args.keyword=='occNum_c': 
			f.write("# %s\tConduction band occupation number (n_c)\n" % tcd)
		f.close()

	# iterate through database
	for fd in filtered_data:
		# defining name
		material_folder=db.get_output(*fd)
		namestr=material_folder.rstrip('/')
		# get all datasets corresponding to fd (different temperatures)
		temperature_datasets=database.filtrate(db.data, corenames, fd)
		if args.keyword in simple_result_keywords:
			# extract data from relevant folders	
			cmd='cat '
			for td in temperature_datasets:
				# the last corename entry is the temperature T
				temperature_folder=db.get_temp_output(td[len(corenames)-1])
				#cmd=cmd + "%s/results/%s.dat " % (fd[-1], args.keyword)
				cmd=cmd + "%s/%s/%s/%s/results/%s.dat " % (resultFolder, subResultFolder, material_folder, temperature_folder, args.keyword)
			cmd=cmd + " > %s/%s_%s.dat" % (suboutput, args.keyword, namestr)
			subprocess.call(cmd, shell=True)	

		elif args.keyword in sophisticated_result_keywords:
			if args.keyword=='cond_para':
				key=args.keyword
				if (args.layer!=0):
					key="%s_layer%03i" % (args.keyword, args.layer)
				outfile="%s/%s_%s.dat" % (suboutput, key, namestr)
				f=open(outfile, 'w')
				for td in temperature_datasets:
					temperature_folder=db.get_temp_output(td[len(corenames)-1])
					filename="%s/%s/%s/%s/results/%s.dat" % (resultFolder, subResultFolder, material_folder, temperature_folder, 'cond')
					# second field of the requested layer's line in cond.dat
					value=float(read(filename, line=args.layer)[1])
					temp=td[len(corenames)-1]
					f.write("%0.17e\t%0.17e\n" % (temp, value))
				f.close()

			if args.keyword=='resist_para':
				key=args.keyword
				if (args.layer!=0):
					key="%s_layer%03i" % (args.keyword, args.layer)
				outfile="%s/%s_%s.dat" % (suboutput, key, namestr)
				f=open(outfile, 'w')
				for td in temperature_datasets:
					temperature_folder=db.get_temp_output(td[len(corenames)-1])
					filename="%s/%s/%s/%s/results/%s.dat" % (resultFolder, subResultFolder, material_folder, temperature_folder, 'resist')
					value=float(read(filename, line=args.layer)[1])
					temp=td[len(corenames)-1]
					f.write("%0.17e\t%0.17e\n" % (temp, value))
				f.close()


			if args.keyword=='cond_perp':
				# without explicit layers: sum the whole conductivity matrix
				if args.layerx==None or args.layery==None:
					outfile="%s/%s_%s.dat" % (suboutput, args.keyword, namestr)
					f=open(outfile, 'w')
					for td in temperature_datasets:
						temperature_folder=db.get_temp_output(td[len(corenames)-1])
						filename="%s/%s/%s/%s/results/%s.dat" % (resultFolder, subResultFolder, material_folder, temperature_folder, 'cond_perp_matrix')
						# read in conductivity matrix
						cmat=np.loadtxt(filename)
						# sum over all entries
						cond_perp=np.sum(cmat)
						temp=td[len(corenames)-1]
						f.write("%0.17e\t%0.17e\n" % (temp, cond_perp))
					f.close()
				else:
					# single matrix element (layerx, layery)
					outfile="%s/%s_%s_%03i_%03i.dat" % (suboutput, args.keyword, namestr, args.layerx, args.layery)
					f=open(outfile, 'w')
					for td in temperature_datasets:
						temperature_folder=db.get_temp_output(td[len(corenames)-1])
						filename="%s/%s/%s/%s/results/%s.dat" % (resultFolder, subResultFolder, material_folder, temperature_folder, 'cond_perp_matrix')
						value=float(read(filename, line=args.layerx)[args.layery])
						temp=td[len(corenames)-1]
						f.write("%0.17e\t%0.17e\n" % (temp, value))
					f.close()

			if args.keyword=='resist_perp':
				if args.layerx==None or args.layery==None:
					outfile="%s/%s_%s.dat" % (suboutput, args.keyword, namestr)
					f=open(outfile, 'w')
					for td in temperature_datasets:
						temperature_folder=db.get_temp_output(td[len(corenames)-1])
						filename="%s/%s/%s/%s/results/%s.dat" % (resultFolder, subResultFolder, material_folder, temperature_folder, 'cond_perp_matrix')
						# read in conductivity matrix
						cmat=np.loadtxt(filename)
						# invert conductivity matrix
						icmat=la.inv(cmat)
						# sum over all entries
						resist_perp=np.sum(icmat)
						temp=td[len(corenames)-1]
						f.write("%0.17e\t%0.17e\n" % (temp, resist_perp))
					f.close()
				else:
					outfile="%s/%s_%s_%03i_%03i.dat" % (suboutput, args.keyword, namestr, args.layerx, args.layery)
					print outfile
					f=open(outfile, 'w')
					for td in temperature_datasets:
						temperature_folder=db.get_temp_output(td[len(corenames)-1])
						filename="%s/%s/%s/%s/results/%s.dat" % (resultFolder, subResultFolder, material_folder, temperature_folder, 'cond_perp_matrix')
						# reciprocal of the conductivity matrix element (layerx, layery)
						value=float(read(filename, line=args.layerx)[args.layery])
						temp=td[len(corenames)-1]
						f.write("%0.17e\t%0.17e\n" % (temp, 1.0/value))
					f.close()

			if args.keyword=='isodelta':
				outfile="%s/%s_%s.dat" % (suboutput, args.keyword, namestr)
				f=open(outfile, 'w')
				for td in temperature_datasets:
					temperature_folder=db.get_temp_output(td[len(corenames)-1])
					filename="%s/%s/%s/%s/results/%s.dat" % (resultFolder, subResultFolder, material_folder, temperature_folder, 'mu')
					# isodelta is the negated chemical potential (energy shift -mu)
					mu=float(read(filename, line=0)[0])
					isodelta=-mu
					temp=td[len(corenames)-1]
					f.write("%0.17e\t%0.17e\n" % (temp, isodelta))
				f.close()

			if args.keyword=='dopant_activation' or args.keyword=='occNum_c':
				# both observables are evaluated at the fixed temperature T=5K
				folder="%s/%s/%s/" % (resultFolder, subResultFolder, material_folder)
				filename="%s/%s/results/%s.dat" % (folder, db.get_temp_output(5.0), 'occNum_c')
				if not os.path.exists(filename):
					print "Warning: Dataset (T=5K) %s=%f is not present. %s does not exist." % (tcd, fd[tci], filename)
				else:
					occNum_c=float(read(filename, line=0)[0])
					f=open(outfile, 'a')
					if args.keyword=='dopant_activation':
						f.write("%0.17e\t%0.17e\n" % (fd[tci], occNum_c/fd[tci]))
					else:
						f.write("%0.17e\t%0.17e\n" % (fd[tci], occNum_c))
					f.close()

			if args.keyword=='tc':
				# get tc and error in tc
				folder="%s/%s/%s/" % (resultFolder, subResultFolder, material_folder)
				filename="%s/tc.dat" % folder
				success=True
				if not os.path.exists(filename):
					success=False
					print "Warning: Dataset %s=%f is not present. %s does not exist." % (tcd, fd[tci], filename)
					answer=raw_input("Try to get? (Y/n)")
					if answer!='n':
						# try to compute tc on the fly from the available data
						success=gettc(args.database, db, folder, args.temperatures, args.tsteps, args.dM)
						if not success:
							print "Warning: Data for %s=%f is not present. -> Skip" % (tcd, fd[tci])
							
				if success:	
					print "Add dataset: %s=%f" % (tcd, fd[tci])
					g=open(filename, 'r')
					tc=0.0
					dtc=0.0
					# the data line holds Tc; the '# Temperature accuracy'
					# header line holds its error
					for l in g.readlines():
						if not l.startswith('#'):
							tc=float(l.split()[0])
						elif l.startswith('# Temperature accuracy'):
							dtc=float(l.partition('=')[2])
					g.close()
					# write tc data row
					f=open(outfile, 'a')
					f.write("%f\t%f\t%f\n" % (fd[tci], tc, dtc))
					f.close()
			

		elif args.keyword=='print':
			# NOTE(review): data rows appear to carry two trailing columns
			# beyond the corenames (observable value and source), so td[-3]
			# is the temperature here -- verify against database.filtrate
			print namestr
			print 'Temperature\t%s' % special
			for td in temperature_datasets:
				print "%e\t%e" % (td[-3], td[-2])
			print

		elif args.keyword=='printfull':
			print namestr
			print 'Temperature\t%s\tSource' % special
			for td in temperature_datasets:
				print "%e\t%e\t%s" % (td[-3], td[-2], td[-1])
			print
Exemplo n.º 10
0
def main():
    parser = argparse.ArgumentParser(
        description=
        'Update database for heterostructure runs on remote database')
    parser.add_argument(
        'input',
        nargs='+',
        help=
        'Folders containing results of isolated material runs or folders containing subfolders with results'
    )
    parser.add_argument('--dry',
                        action='store_true',
                        help='Simulate updating of database')
    parser.add_argument('--archive',
                        action='store_true',
                        help='Archive all results')

    args = parser.parse_args()

    # get host
    host = database.get_host()
    idb = database.heterostructure_database()
    idb.download()
    found = False
    serverdir = ''
    clientdir = ''
    for worker in idb.workers:
        if host == worker.host:
            serverdir = worker.serverdir
            clientdir = worker.clientdir
            found = True
    if not found:
        print "Error: Heterostructure database remote: Unknow host: %s" % host
        print "Break."
        exit(1)

    # get path on steinschal-tradigist
    inputs = []
    for ipath in args.input:
        # get absolute path
        apath = os.path.abspath(ipath)
        if not apath.startswith(clientdir):
            print "Error: Heterostructure database remote: %s is an unknown run path. Break." % apath
            exit(1)
        inputs.append(apath.replace(clientdir, serverdir, 1))

    cmd = '/users/stollenw/projects/euo/tools/euoscripts/heterostructure_update.py'
    for inp in inputs:
        cmd += " %s" % inp
    if args.dry:
        cmd += " --dry"
    if args.archive:
        cmd += " --archive"

    try:
        rcmd = [
            'ssh', 'steinschal-tradigist.th.physik.uni-bonn.de',
            '%s' % cmd
        ]
        subprocess.call(rcmd)
    except:
        print "Unable to update remote database. Break."
        exit(1)
Exemplo n.º 11
0
def main():
	parser = argparse.ArgumentParser(description='Calculate conductivity out of euo program results')
	parser.add_argument('-d', '--database', help='specify database')
	parser.add_argument('-s', '--dataset', nargs='+', help='specify dataset without temperature')
	parser.add_argument('-n', '--np', default=1, help='Number of processes for parallel use', type=int)
	parser.add_argument('-i', '--input', help='Input folder containing result of a single run.')
	parser.add_argument('-o', '--output', default=None, help='Output Folder for the single run results (optional, default=input)')
	parser.add_argument('--no_isodelta_db', action='store_false', help='No fetching of more accurate isodeltas in the case of heterostructures')
	parser.add_argument('--no_overwrite', action="store_true", help='recalculate conductivity, even if it is present in the database')
	#parser.add_argument('--force_download', action="store_true", help='Download *all* results from database, even if they exist in the current folder')
	args = parser.parse_args()
	

	# remote or steinschal-tradigist
	host=database.get_host()
	# get mpi run command depening on host
	mpicmd=get_worker().mpicmd

	# add current working directory to system path
	sys.path.append(os.getcwd())

	if not args.input==None:
		getcond(args.input, np=args.np, isodeltadb=args.no_isodelta_db, outputFolder=args.output)
	else:
		if not args.database in ('bulk', 'isolated', 'hetero'):
			print "Database must be 'bulk', 'isolated' or 'hetero'"
			exit(1)
			
		db=None
		corenames=None
		isodeltadb=False
		if args.database=='bulk':
			db=database.bulk_database()	
			corenames=('material', 'ni', 'T')
			filenames=("cond.dat", "resist.dat")
			top_result_folder = "/users/stollenw/projects/euo/results/bulk/"
		elif args.database=='isolated':
			db=database.isolated_database()	
			corenames=('material', 'N', 'ni', 'T')
			top_result_folder = "/users/stollenw/projects/euo/results/isolated/"
			filenames=("cond.dat", "resist.dat", "cond_perp.dat", "resist_perp.dat", "cond_perp_matrix.dat", "resist_perp_matrix.dat")
		else:
			db=database.heterostructure_database()	
			corenames=('material', 'N', 'M', 'ni', 'ncr', 'dW', 'T')
			top_result_folder = "/users/stollenw/projects/euo/results/heterostructure/"
			filenames=("cond.dat", "resist.dat", "cond_perp.dat", "resist_perp.dat", "cond_perp_matrix.dat", "resist_perp_matrix.dat")
			isodeltadb=args.no_isodelta_db
		db.download()
	
		# get filtered data, i.e. reduce according to args.dataset (if not given, only sort)
		filtered_data=database.filtrate(db.data, corenames, args.dataset)

		for fd in filtered_data:
			print fd
			result_folder = fd[-1] + "/"
			# check if conductivity calculation was already performed
			print "check existence ..." 
			exists=check_file_exists(host, '%s/results/%s' % (result_folder, filenames[0]))
			# calculate conductivity if necessary or forced
			if not exists or not args.no_overwrite:
				print "calculate conductivity ..." 
				getcondremote(host, db, filenames, result_folder, mpicmd, args.np, isodeltadb)
Exemplo n.º 12
0
def main():
    dataset_help = """Specify dataset 
	
e.g. "Metal-Metal-Heterostructure 5 9 0.01 0.01 0.125" 
(for material, N, M, ni, ncr and dW). 

You may use "all" as a placeholder or do not specify 
the last values e.g. "all 5 all 0.01

"""

    parser = argparse.ArgumentParser(
        description='Update database for heterostructure runs')
    parser.add_argument(
        'input',
        nargs='*',
        help=
        'Folders containing results of heterostructure material runs or folders containing subfolders with results'
    )
    parser.add_argument(
        '-d',
        '--database',
        default='/users/stollenw/projects/euo/database/hetero.db',
        help='Database file name')
    parser.add_argument('-s', '--dataset', nargs='*', help=dataset_help)
    parser.add_argument('--overwrite',
                        action='store_true',
                        help='Overwrite database')
    parser.add_argument('--archive',
                        action='store_true',
                        help='Archive all results')
    parser.add_argument(
        '--archive_destination',
        default='/users/stollenw/projects/euo/results/heterostructure/',
        help='Archive folder')
    parser.add_argument('--dry',
                        action='store_true',
                        help='Simulate updating of database')

    args = parser.parse_args()

    #print args.overwrite
    #print args.input
    #print args.database

    # initialize database
    t = database.heterostructure_database()
    # read in database if it already exists and overwrite flag is not given
    if os.path.exists(args.database) and not args.overwrite:
        t.read(args.database)

    if args.dataset == None:
        t.fill(args.input, args.overwrite)
        if not args.dry:
            t.write(args.database)
            if args.archive:
                t.archive(args.archive_destination)
            else:
                for iput in args.input:
                    t.archive(args.archive_destination, None,
                              os.path.abspath(iput))
        else:
            print "Archive folder would be: ", args.archive_destination
    else:
        t.archive(args.archive_destination, args.dataset)